// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_WASM_JUMP_TABLE_ASSEMBLER_H_
#define V8_WASM_JUMP_TABLE_ASSEMBLER_H_

#include "src/macro-assembler.h"
#include "src/wasm/wasm-code-manager.h"

namespace v8 {
namespace internal {
namespace wasm {

// The jump table is the central dispatch point for all (direct and indirect)
// invocations in WebAssembly. It holds one slot per function in a module, with
// each slot containing a dispatch to the currently published {WasmCode} that
// corresponds to the function.
//
// Note that the table is split into lines of fixed size, with lines laid out
// consecutively within the executable memory of the {NativeModule}. The slots
// in turn are consecutive within a line, but do not cross line boundaries.
//
//   +- L1 -------------------+ +- L2 -------------------+ +- L3 ...
//   | S1 | S2 | ... | Sn | x | | S1 | S2 | ... | Sn | x | | S1  ...
//   +------------------------+ +------------------------+ +---- ...
//
// The above illustrates jump table lines {Li} containing slots {Si} with each
// line containing {n} slots and some padding {x} for alignment purposes.
class JumpTableAssembler : public TurboAssembler {
 public:
  // Translate an offset into the contiguous jump table to a jump table index.
  static uint32_t SlotOffsetToIndex(uint32_t slot_offset) {
    uint32_t line_index = slot_offset / kJumpTableLineSize;
    uint32_t line_offset = slot_offset % kJumpTableLineSize;
    DCHECK_EQ(0, line_offset % kJumpTableSlotSize);
    return line_index * kJumpTableSlotsPerLine +
           line_offset / kJumpTableSlotSize;
  }

  // Translate a jump table index to an offset into the contiguous jump table.
  static uint32_t SlotIndexToOffset(uint32_t slot_index) {
    uint32_t line_index = slot_index / kJumpTableSlotsPerLine;
    uint32_t line_offset =
        (slot_index % kJumpTableSlotsPerLine) * kJumpTableSlotSize;
    return line_index * kJumpTableLineSize + line_offset;
  }
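
  // Worked example for the two translations above (a sketch assuming the x64
  // constants below, i.e. 64-byte lines with three 18-byte slots each): slot
  // index 4 lives in line 1 at line offset 18, so SlotIndexToOffset(4)
  // returns 1 * 64 + 18 == 82, and SlotOffsetToIndex(82) returns
  // 1 * 3 + 18 / 18 == 4.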

  // Determine the size of a jump table containing the given number of slots.
  static constexpr uint32_t SizeForNumberOfSlots(uint32_t slot_count) {
    // TODO(wasm): Once the {RoundUp} utility handles non-powers of two values,
    // use: {RoundUp<kJumpTableSlotsPerLine>(slot_count) * kJumpTableLineSize}
    return ((slot_count + kJumpTableSlotsPerLine - 1) /
            kJumpTableSlotsPerLine) *
           kJumpTableLineSize;
  }
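
  // Worked example (a sketch assuming the x64 constants defined below, i.e.
  // three 18-byte slots per 64-byte line): SizeForNumberOfSlots(7) rounds 7
  // slots up to ceil(7 / 3) = 3 lines and returns 3 * 64 = 192 bytes.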

  static void EmitLazyCompileJumpSlot(Address base, uint32_t slot_index,
                                      uint32_t func_index,
                                      Address lazy_compile_target,
                                      WasmCode::FlushICache flush_i_cache) {
    Address slot = base + SlotIndexToOffset(slot_index);
    JumpTableAssembler jtasm(slot);
    jtasm.EmitLazyCompileJumpSlot(func_index, lazy_compile_target);
    jtasm.NopBytes(kJumpTableSlotSize - jtasm.pc_offset());
    if (flush_i_cache) {
      Assembler::FlushICache(slot, kJumpTableSlotSize);
    }
  }

  static void PatchJumpTableSlot(Address base, uint32_t slot_index,
                                 Address new_target,
                                 WasmCode::FlushICache flush_i_cache) {
    Address slot = base + SlotIndexToOffset(slot_index);
    JumpTableAssembler jtasm(slot);
    jtasm.EmitJumpSlot(new_target);
    jtasm.NopBytes(kJumpTableSlotSize - jtasm.pc_offset());
    if (flush_i_cache) {
      Assembler::FlushICache(slot, kJumpTableSlotSize);
    }
  }
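
  // A typical use of the patching interface above, as a sketch; the names
  // {jump_table_start}, {slot_idx}, {target} and {flush} are hypothetical
  // and not part of this interface:
  //
  //   JumpTableAssembler::PatchJumpTableSlot(jump_table_start, slot_idx,
  //                                          target, flush);
  //
  // Here {target} would be the instruction start of the newly published
  // {WasmCode}, and {flush} a {WasmCode::FlushICache} value requesting that
  // the instruction cache be flushed for the patched slot.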

 private:
  // Instantiate a {JumpTableAssembler} for patching.
  explicit JumpTableAssembler(Address slot_addr, int size = 256)
      : TurboAssembler(nullptr, JumpTableAssemblerOptions(),
                       reinterpret_cast<void*>(slot_addr), size,
                       CodeObjectRequired::kNo) {}

// To allow concurrent patching of the jump table entries, we need to ensure
// that the instruction containing the call target does not cross cache-line
// boundaries. The jump table line size has been chosen to satisfy this.
#if V8_TARGET_ARCH_X64
  static constexpr int kJumpTableLineSize = 64;
  static constexpr int kJumpTableSlotSize = 18;
#elif V8_TARGET_ARCH_IA32
  static constexpr int kJumpTableLineSize = 64;
  static constexpr int kJumpTableSlotSize = 10;
#elif V8_TARGET_ARCH_ARM
  static constexpr int kJumpTableLineSize = 5 * kInstrSize;
  static constexpr int kJumpTableSlotSize = 5 * kInstrSize;
#elif V8_TARGET_ARCH_ARM64
  static constexpr int kJumpTableLineSize = 3 * kInstrSize;
  static constexpr int kJumpTableSlotSize = 3 * kInstrSize;
#elif V8_TARGET_ARCH_S390X
  static constexpr int kJumpTableLineSize = 20;
  static constexpr int kJumpTableSlotSize = 20;
#elif V8_TARGET_ARCH_S390
  static constexpr int kJumpTableLineSize = 14;
  static constexpr int kJumpTableSlotSize = 14;
#elif V8_TARGET_ARCH_PPC64
  static constexpr int kJumpTableLineSize = 48;
  static constexpr int kJumpTableSlotSize = 48;
#elif V8_TARGET_ARCH_PPC
  static constexpr int kJumpTableLineSize = 24;
  static constexpr int kJumpTableSlotSize = 24;
#elif V8_TARGET_ARCH_MIPS
  static constexpr int kJumpTableLineSize = 6 * kInstrSize;
  static constexpr int kJumpTableSlotSize = 6 * kInstrSize;
#elif V8_TARGET_ARCH_MIPS64
  static constexpr int kJumpTableLineSize = 8 * kInstrSize;
  static constexpr int kJumpTableSlotSize = 8 * kInstrSize;
#else
  static constexpr int kJumpTableLineSize = 1;
  static constexpr int kJumpTableSlotSize = 1;
#endif

  static constexpr int kJumpTableSlotsPerLine =
      kJumpTableLineSize / kJumpTableSlotSize;
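
  // Compile-time sanity checks, as a minimal sketch of the layout invariants
  // that the translation helpers above rely on: a slot never exceeds a line,
  // and every line holds at least one slot.
  static_assert(kJumpTableSlotSize <= kJumpTableLineSize,
                "a jump table slot must fit into a single jump table line");
  static_assert(kJumpTableSlotsPerLine >= 1,
                "a jump table line must hold at least one slot");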

  // {JumpTableAssembler} is never used during snapshot generation, and its
  // code must be independent of the code range of any isolate anyway. We just
  // ensure that no relocation information is recorded; there is no buffer to
  // store it, since the assembler is instantiated in patching mode directly
  // on existing code.
  static AssemblerOptions JumpTableAssemblerOptions() {
    AssemblerOptions options;
    options.disable_reloc_info_for_patching = true;
    return options;
  }

  void EmitLazyCompileJumpSlot(uint32_t func_index,
                               Address lazy_compile_target);

  void EmitJumpSlot(Address target);

  void NopBytes(int bytes);
};

}  // namespace wasm
}  // namespace internal
}  // namespace v8

#endif  // V8_WASM_JUMP_TABLE_ASSEMBLER_H_