// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_WASM_WASM_CODE_MANAGER_H_
#define V8_WASM_WASM_CODE_MANAGER_H_

#include <functional>
#include <list>
#include <map>
#include <unordered_map>
#include <unordered_set>

#include "src/base/macros.h"
#include "src/builtins/builtins-definitions.h"
#include "src/handles.h"
#include "src/trap-handler/trap-handler.h"
#include "src/vector.h"
#include "src/wasm/module-compiler.h"
#include "src/wasm/wasm-features.h"

namespace v8 {
namespace internal {

struct CodeDesc;
class Code;

namespace wasm {

class NativeModule;
class WasmCodeManager;
class WasmMemoryTracker;
struct WasmModule;

struct AddressRange {
  Address start;
  Address end;

  AddressRange(Address s, Address e) : start(s), end(e) {
    DCHECK_LE(start, end);
    DCHECK_IMPLIES(start == kNullAddress, end == kNullAddress);
  }
  AddressRange() : AddressRange(kNullAddress, kNullAddress) {}

  size_t size() const { return static_cast<size_t>(end - start); }
  bool is_empty() const { return start == end; }
  operator bool() const { return start != kNullAddress; }
};
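
// A semantics note (illustrative sketch, not part of the interface): ranges
// are half-open, and only the default-constructed null range converts to
// false.
//
//   AddressRange range(0x1000, 0x2000);  // covers [0x1000, 0x2000)
//   range.size();      // 0x1000 bytes
//   range.is_empty();  // false
//   bool(range);       // true; only the default (null) range yields false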

// Sorted, disjoint memory ranges. Each range is half-open: [start, end).
// Adjacent ranges are coalesced: there is never a pair [start, end),
// [end, other_end), because it would have been reduced to [start, other_end).
class V8_EXPORT_PRIVATE DisjointAllocationPool final {
 public:
  DisjointAllocationPool() = default;

  explicit DisjointAllocationPool(AddressRange range) : ranges_({range}) {}

  DisjointAllocationPool(DisjointAllocationPool&& other) = default;
  DisjointAllocationPool& operator=(DisjointAllocationPool&& other) = default;

  // Merge the parameter range into this object while preserving ordering of
  // the ranges. The assumption is that the passed parameter does not intersect
  // this object - for example, it was obtained from a previous Allocate.
  void Merge(AddressRange);

  // Allocate a contiguous range of size {size}. Return an empty range on
  // failure.
  AddressRange Allocate(size_t size);

  bool IsEmpty() const { return ranges_.empty(); }
  const std::list<AddressRange>& ranges() const { return ranges_; }

 private:
  std::list<AddressRange> ranges_;

  DISALLOW_COPY_AND_ASSIGN(DisjointAllocationPool);
};
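
// Usage sketch (illustrative only; {base} stands for the start of some
// code reservation owned by the caller):
//
//   DisjointAllocationPool pool({base, base + 0x10000});
//   AddressRange range = pool.Allocate(0x1000);
//   if (!range.is_empty()) {
//     // ... use [range.start, range.end) ...
//     pool.Merge(range);  // give the range back; neighbors get coalesced
//   }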

class V8_EXPORT_PRIVATE WasmCode final {
 public:
  enum Kind {
    kFunction,
    kWasmToJsWrapper,
    kLazyStub,
    kRuntimeStub,
    kInterpreterEntry,
    kJumpTable
  };

  // Each runtime stub is identified by an id. This id is used to reference the
  // stub via {RelocInfo::WASM_STUB_CALL} and gets resolved during relocation.
  enum RuntimeStubId {
#define DEF_ENUM(Name) k##Name,
#define DEF_ENUM_TRAP(Name) kThrowWasm##Name,
    WASM_RUNTIME_STUB_LIST(DEF_ENUM, DEF_ENUM_TRAP)
#undef DEF_ENUM_TRAP
#undef DEF_ENUM
        kRuntimeStubCount
  };
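
  // To illustrate the expansion above with hypothetical list entries: a plain
  // entry {Foo} passed through DEF_ENUM becomes {kFoo}, while a trap entry
  // {Unreachable} passed through DEF_ENUM_TRAP becomes
  // {kThrowWasmUnreachable}. {kRuntimeStubCount} always comes last and equals
  // the number of stubs.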

  // kOther is used if we have WasmCode that is neither liftoff- nor
  // turbofan-compiled, i.e. if its Kind is not kFunction.
  enum Tier : int8_t { kLiftoff, kTurbofan, kOther };

  Vector<byte> instructions() const { return instructions_; }
  Address instruction_start() const {
    return reinterpret_cast<Address>(instructions_.start());
  }
  Vector<const byte> reloc_info() const { return reloc_info_.as_vector(); }
  Vector<const byte> source_positions() const {
    return source_position_table_.as_vector();
  }

  uint32_t index() const { return index_.ToChecked(); }
  // Anonymous functions are functions that don't carry an index.
  bool IsAnonymous() const { return index_.IsNothing(); }
  Kind kind() const { return kind_; }
  NativeModule* native_module() const { return native_module_; }
  Tier tier() const { return tier_; }
  Address constant_pool() const;
  size_t constant_pool_offset() const { return constant_pool_offset_; }
  size_t safepoint_table_offset() const { return safepoint_table_offset_; }
  size_t handler_table_offset() const { return handler_table_offset_; }
  uint32_t stack_slots() const { return stack_slots_; }
  bool is_liftoff() const { return tier_ == kLiftoff; }
  bool contains(Address pc) const {
    return reinterpret_cast<Address>(instructions_.start()) <= pc &&
           pc < reinterpret_cast<Address>(instructions_.end());
  }

  Vector<trap_handler::ProtectedInstructionData> protected_instructions()
      const {
    return protected_instructions_.as_vector();
  }

  void Validate() const;
  void Print(const char* name = nullptr) const;
  void Disassemble(const char* name, std::ostream& os,
                   Address current_pc = kNullAddress) const;

  static bool ShouldBeLogged(Isolate* isolate);
  void LogCode(Isolate* isolate) const;

  ~WasmCode();

  enum FlushICache : bool { kFlushICache = true, kNoFlushICache = false };

 private:
  friend class NativeModule;

  WasmCode(NativeModule* native_module, Maybe<uint32_t> index,
           Vector<byte> instructions, uint32_t stack_slots,
           size_t safepoint_table_offset, size_t handler_table_offset,
           size_t constant_pool_offset,
           OwnedVector<trap_handler::ProtectedInstructionData>
               protected_instructions,
           OwnedVector<const byte> reloc_info,
           OwnedVector<const byte> source_position_table, Kind kind, Tier tier)
      : instructions_(instructions),
        reloc_info_(std::move(reloc_info)),
        source_position_table_(std::move(source_position_table)),
        native_module_(native_module),
        index_(index),
        kind_(kind),
        constant_pool_offset_(constant_pool_offset),
        stack_slots_(stack_slots),
        safepoint_table_offset_(safepoint_table_offset),
        handler_table_offset_(handler_table_offset),
        protected_instructions_(std::move(protected_instructions)),
        tier_(tier) {
    DCHECK_LE(safepoint_table_offset, instructions.size());
    DCHECK_LE(constant_pool_offset, instructions.size());
    DCHECK_LE(handler_table_offset, instructions.size());
  }

  // Code objects that have been registered with the global trap handler within
  // this process will have a {trap_handler_index} associated with them.
  size_t trap_handler_index() const;
  void set_trap_handler_index(size_t);
  bool HasTrapHandlerIndex() const;

  // Register protected instruction information with the trap handler. Sets
  // trap_handler_index.
  void RegisterTrapHandlerData();

  Vector<byte> instructions_;
  OwnedVector<const byte> reloc_info_;
  OwnedVector<const byte> source_position_table_;
  NativeModule* native_module_ = nullptr;
  Maybe<uint32_t> index_;
  Kind kind_;
  size_t constant_pool_offset_ = 0;
  uint32_t stack_slots_ = 0;
  // We care about safepoint data for wasm-to-js functions, since there may be
  // stack/register tagged values for large number conversions.
  size_t safepoint_table_offset_ = 0;
  size_t handler_table_offset_ = 0;
  intptr_t trap_handler_index_ = -1;
  OwnedVector<trap_handler::ProtectedInstructionData> protected_instructions_;
  Tier tier_;

  DISALLOW_COPY_AND_ASSIGN(WasmCode);
};

// Return a textual description of the kind.
const char* GetWasmCodeKindAsString(WasmCode::Kind);

class V8_EXPORT_PRIVATE NativeModule final {
 public:
#if V8_TARGET_ARCH_X64 || V8_TARGET_ARCH_S390X || V8_TARGET_ARCH_ARM64
  static constexpr bool kCanAllocateMoreMemory = false;
#else
  static constexpr bool kCanAllocateMoreMemory = true;
#endif

  // {AddCode} is thread safe w.r.t. other calls to {AddCode} or {AddCodeCopy},
  // i.e. it can be called concurrently from background threads.
  WasmCode* AddCode(uint32_t index, const CodeDesc& desc, uint32_t stack_slots,
                    size_t safepoint_table_offset, size_t handler_table_offset,
                    OwnedVector<trap_handler::ProtectedInstructionData>
                        protected_instructions,
                    OwnedVector<const byte> source_position_table,
                    WasmCode::Tier tier);

  WasmCode* AddDeserializedCode(
      uint32_t index, Vector<const byte> instructions, uint32_t stack_slots,
      size_t safepoint_table_offset, size_t handler_table_offset,
      size_t constant_pool_offset,
      OwnedVector<trap_handler::ProtectedInstructionData>
          protected_instructions,
      OwnedVector<const byte> reloc_info,
      OwnedVector<const byte> source_position_table, WasmCode::Tier tier);

  // A way to copy over JS-allocated code. This is because we compile
  // certain wrappers using a different pipeline.
  WasmCode* AddCodeCopy(Handle<Code> code, WasmCode::Kind kind, uint32_t index);

  // Add an interpreter entry. For the same reason as AddCodeCopy, we
  // currently compile these using a different pipeline and we can't get a
  // CodeDesc here. When adding interpreter wrappers, we do not insert them in
  // the code_table; however, we let them self-identify as the {index}
  // function.
  WasmCode* AddInterpreterEntry(Handle<Code> code, uint32_t index);

  // When starting lazy compilation, provide the WasmLazyCompile builtin by
  // calling SetLazyBuiltin. It will be copied into this NativeModule and the
  // jump table will be populated with that copy.
  void SetLazyBuiltin(Handle<Code> code);

  // Initializes all runtime stubs by copying them over from the JS-allocated
  // heap into this native module. It must be called exactly once per native
  // module before adding other WasmCode so that runtime stub ids can be
  // resolved during relocation.
  void SetRuntimeStubs(Isolate* isolate);

  // Makes the code available to the system (by entering it into the code table
  // and patching the jump table). Callers have to take care not to race with
  // threads executing the old code.
  void PublishCode(WasmCode* code);

  // Creates a snapshot of the current state of the code table. This is useful
  // to get a consistent view of the table (e.g. used by the serializer).
  std::vector<WasmCode*> SnapshotCodeTable() const;

  WasmCode* code(uint32_t index) const {
    DCHECK_LT(index, num_functions());
    DCHECK_LE(module_->num_imported_functions, index);
    return code_table_[index - module_->num_imported_functions];
  }

  bool has_code(uint32_t index) const { return code(index) != nullptr; }

  WasmCode* runtime_stub(WasmCode::RuntimeStubId index) const {
    DCHECK_LT(index, WasmCode::kRuntimeStubCount);
    WasmCode* code = runtime_stub_table_[index];
    DCHECK_NOT_NULL(code);
    return code;
  }

  Address jump_table_start() const {
    return jump_table_ ? jump_table_->instruction_start() : kNullAddress;
  }

  ptrdiff_t jump_table_offset(uint32_t func_index) const {
    DCHECK_GE(func_index, num_imported_functions());
    return GetCallTargetForFunction(func_index) - jump_table_start();
  }
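
  // Relationship sketch: for a declared (non-imported) function {f}, the call
  // target lies inside the jump table, so by construction
  //
  //   GetCallTargetForFunction(f) == jump_table_start() + jump_table_offset(f)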

  bool is_jump_table_slot(Address address) const {
    return jump_table_->contains(address);
  }

  // Transition this module from code relying on trap handlers (i.e. without
  // explicit memory bounds checks) to code that does not require trap handlers
  // (i.e. code with explicit bounds checks).
  // This method must only be called if {use_trap_handler()} is true (it will be
  // false afterwards). All code in this {NativeModule} needs to be re-added
  // after calling this method.
  void DisableTrapHandler();

  // Returns the target to call for the given function (returns a jump table
  // slot within {jump_table_}).
  Address GetCallTargetForFunction(uint32_t func_index) const;

  // Reverse lookup from a given call target (i.e. a jump table slot as the
  // above {GetCallTargetForFunction} returns) to a function index.
  uint32_t GetFunctionIndexFromJumpTableSlot(Address slot_address) const;
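
  // Round-trip sketch (assuming {f} is a declared, non-imported function
  // index):
  //
  //   Address slot = native_module->GetCallTargetForFunction(f);
  //   DCHECK(native_module->is_jump_table_slot(slot));
  //   DCHECK_EQ(f, native_module->GetFunctionIndexFromJumpTableSlot(slot));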

  bool SetExecutable(bool executable);

  // For cctests, where we build both the WasmModule and the runtime objects
  // on the fly, bypassing the instance builder pipeline.
  void ReserveCodeTableForTesting(uint32_t max_functions);

  void LogWasmCodes(Isolate* isolate);

  CompilationState* compilation_state() { return compilation_state_.get(); }

  uint32_t num_functions() const {
    return module_->num_declared_functions + module_->num_imported_functions;
  }
  uint32_t num_imported_functions() const {
    return module_->num_imported_functions;
  }
  bool use_trap_handler() const { return use_trap_handler_; }
  void set_lazy_compile_frozen(bool frozen) { lazy_compile_frozen_ = frozen; }
  bool lazy_compile_frozen() const { return lazy_compile_frozen_; }
  Vector<const byte> wire_bytes() const { return wire_bytes_.as_vector(); }
  void set_wire_bytes(OwnedVector<const byte> wire_bytes) {
    wire_bytes_ = std::move(wire_bytes);
  }
  const WasmModule* module() const { return module_.get(); }
  WasmCodeManager* code_manager() const { return wasm_code_manager_; }

  WasmCode* Lookup(Address) const;

  ~NativeModule();

  const WasmFeatures& enabled_features() const { return enabled_features_; }

 private:
  friend class WasmCode;
  friend class WasmCodeManager;
  friend class NativeModuleModificationScope;

  NativeModule(Isolate* isolate, const WasmFeatures& enabled_features,
               bool can_request_more, VirtualMemory* code_space,
               WasmCodeManager* code_manager,
               std::shared_ptr<const WasmModule> module, const ModuleEnv& env);

  WasmCode* AddAnonymousCode(Handle<Code>, WasmCode::Kind kind);
  Address AllocateForCode(size_t size);

  // Primitive for adding code to the native module. All code added to a native
  // module is owned by that module. Various callers get to decide how the code
  // is obtained (a CodeDesc vs., for now, a Code*), its kind, whether it
  // carries an index or is anonymous, etc.
  WasmCode* AddOwnedCode(Maybe<uint32_t> index, Vector<const byte> instructions,
                         uint32_t stack_slots, size_t safepoint_table_offset,
                         size_t handler_table_offset,
                         size_t constant_pool_offset,
                         OwnedVector<trap_handler::ProtectedInstructionData>,
                         OwnedVector<const byte> reloc_info,
                         OwnedVector<const byte> source_position_table,
                         WasmCode::Kind, WasmCode::Tier);

  WasmCode* CreateEmptyJumpTable(uint32_t num_wasm_functions);

  void PatchJumpTable(uint32_t func_index, Address target,
                      WasmCode::FlushICache);

  Vector<WasmCode*> code_table() const {
    return {code_table_.get(), module_->num_declared_functions};
  }
  void set_code(uint32_t index, WasmCode* code) {
    DCHECK_LT(index, num_functions());
    DCHECK_LE(module_->num_imported_functions, index);
    DCHECK_EQ(code->index(), index);
    code_table_[index - module_->num_imported_functions] = code;
  }

  // Features enabled for this module. We keep a copy of the features that
  // were enabled at the time of the creation of this native module,
  // to be consistent across asynchronous compilations later.
  const WasmFeatures enabled_features_;

  // TODO(clemensh): Make this a unique_ptr (requires refactoring
  // AsyncCompileJob).
  std::shared_ptr<const WasmModule> module_;

  // Holds all allocated code objects. Maintained in ascending order of the
  // code's instruction start addresses to allow lookups.
  std::vector<std::unique_ptr<WasmCode>> owned_code_;

  std::unique_ptr<WasmCode* []> code_table_;

  OwnedVector<const byte> wire_bytes_;

  WasmCode* runtime_stub_table_[WasmCode::kRuntimeStubCount] = {nullptr};

  // Jump table used to easily redirect wasm function calls.
  WasmCode* jump_table_ = nullptr;

  // The compilation state keeps track of compilation tasks for this module.
  // Note that its destructor blocks until all tasks are finished/aborted and
  // hence needs to be destructed first when this native module dies.
  std::unique_ptr<CompilationState, CompilationStateDeleter> compilation_state_;

  // This mutex protects concurrent calls to {AddCode} and {AddCodeCopy}.
  mutable base::Mutex allocation_mutex_;

  DisjointAllocationPool free_code_space_;
  DisjointAllocationPool allocated_code_space_;
  std::list<VirtualMemory> owned_code_space_;

  WasmCodeManager* wasm_code_manager_;
  std::atomic<size_t> committed_code_space_{0};
  int modification_scope_depth_ = 0;
  bool can_request_more_memory_;
  bool use_trap_handler_ = false;
  bool is_executable_ = false;
  bool lazy_compile_frozen_ = false;

  DISALLOW_COPY_AND_ASSIGN(NativeModule);
};

class V8_EXPORT_PRIVATE WasmCodeManager final {
 public:
  explicit WasmCodeManager(WasmMemoryTracker* memory_tracker,
                           size_t max_committed);
  // Create a new NativeModule. The caller is responsible for its
  // lifetime. The native module will be given some memory for code,
  // which will be page size aligned. The size of the initial memory
  // is determined with a heuristic based on the total size of wasm
  // code. The native module may later request more memory.
  // TODO(titzer): isolate is only required here for CompilationState.
  std::unique_ptr<NativeModule> NewNativeModule(
      Isolate* isolate, const WasmFeatures& enabled_features,
      size_t memory_estimate, bool can_request_more,
      std::shared_ptr<const WasmModule> module, const ModuleEnv& env);

  NativeModule* LookupNativeModule(Address pc) const;
  WasmCode* LookupCode(Address pc) const;
  WasmCode* GetCodeFromStartAddress(Address pc) const;
  size_t remaining_uncommitted_code_space() const;

  // Add a sample of all module sizes.
  void SampleModuleSizes(Isolate* isolate) const;

  // TODO(v8:7424): For now we sample module sizes in a GC callback. This will
  // bias samples towards apps with high memory pressure. We should switch to
  // using sampling based on regular intervals independent of the GC.
  static void InstallSamplingGCCallback(Isolate* isolate);

  static size_t EstimateNativeModuleSize(const WasmModule* module);

 private:
  friend class NativeModule;

  void TryAllocate(size_t size, VirtualMemory*, void* hint = nullptr);
  bool Commit(Address, size_t);
  // Currently, we uncommit a whole module, so all we need to do is account
  // for the freed memory size. We do that in FreeNativeModule.
  // There's no separate Uncommit.

  void FreeNativeModule(NativeModule*);
  void Free(VirtualMemory* mem);
  void AssignRanges(Address start, Address end, NativeModule*);
  bool ShouldForceCriticalMemoryPressureNotification();

  WasmMemoryTracker* const memory_tracker_;
  mutable base::Mutex native_modules_mutex_;
  std::map<Address, std::pair<Address, NativeModule*>> lookup_map_;
  std::unordered_set<NativeModule*> native_modules_;
  std::atomic<size_t> remaining_uncommitted_code_space_;

  DISALLOW_COPY_AND_ASSIGN(WasmCodeManager);
};
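
// Typical lookup flow (sketch; {code_manager} and {pc} are assumed to come
// from the embedder, e.g. from a stack walk):
//
//   WasmCode* code = code_manager->LookupCode(pc);
//   if (code != nullptr) {
//     DCHECK(code->contains(pc));
//     DCHECK_EQ(code->native_module(), code_manager->LookupNativeModule(pc));
//   }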

// Within the scope, the native_module is writable and not executable.
// At the scope's destruction, the native_module is executable and not writable.
// The states inside the scope and at the scope's termination do not depend on
// the native_module's state when entering the scope.
// We currently mark the entire module's memory W^X:
//  - for AOT, that's as efficient as it can be.
//  - for Lazy, we don't have a heuristic for functions that may need patching,
//    and even if we did, the resulting set of pages may be fragmented.
//    Currently, we try to keep the number of syscalls low.
//  - similar argument for debug time.
class NativeModuleModificationScope final {
 public:
  explicit NativeModuleModificationScope(NativeModule* native_module);
  ~NativeModuleModificationScope();

 private:
  NativeModule* native_module_;
};
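
// Usage sketch:
//
//   {
//     NativeModuleModificationScope scope(native_module);
//     // Code space is writable here: copy code, patch the jump table, etc.
//   }
//   // Code space is executable (and not writable) again at this point.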

}  // namespace wasm
}  // namespace internal
}  // namespace v8

#endif  // V8_WASM_WASM_CODE_MANAGER_H_