// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_COMPILER_INSTRUCTION_SELECTOR_H_
#define V8_COMPILER_INSTRUCTION_SELECTOR_H_

#include <map>

#include "src/compiler/common-operator.h"
#include "src/compiler/instruction.h"
#include "src/compiler/instruction-scheduler.h"
#include "src/compiler/machine-operator.h"
#include "src/compiler/node.h"
#include "src/zone-containers.h"

namespace v8 {
namespace internal {
namespace compiler {

// Forward declarations.
class BasicBlock;
struct CallBuffer;  // TODO(bmeurer): Remove this.
class FlagsContinuation;
class Linkage;
class OperandGenerator;
struct SwitchInfo;

// This class connects nodes of parameters which are going to be pushed on the
// call stack with their parameter index in the call descriptor of the callee.
class PushParameter {
 public:
  PushParameter() : node_(nullptr), type_(MachineType::None()) {}
  PushParameter(Node* node, MachineType type) : node_(node), type_(type) {}

  Node* node() const { return node_; }
  MachineType type() const { return type_; }

 private:
  Node* node_;
  MachineType type_;
};

// Instruction selection generates an InstructionSequence for a given Schedule.
class InstructionSelector final {
 public:
  // Forward declarations.
  class Features;

  enum SourcePositionMode { kCallSourcePositions, kAllSourcePositions };

  InstructionSelector(
      Zone* zone, size_t node_count, Linkage* linkage,
      InstructionSequence* sequence, Schedule* schedule,
      SourcePositionTable* source_positions,
      SourcePositionMode source_position_mode = kCallSourcePositions,
      Features features = SupportedFeatures());

  // Visit code for the entire graph with the included schedule.
  void SelectInstructions();

  void StartBlock(RpoNumber rpo);
  void EndBlock(RpoNumber rpo);
  void AddInstruction(Instruction* instr);

  // ===========================================================================
  // ============ Architecture-independent code emission methods. =============
  // ===========================================================================
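
  // Each Emit() overload below builds an Instruction with the given opcode,
  // output(s), input(s) and temp(s), appends it to the current block, and
  // returns it. A minimal sketch of how an architecture backend typically
  // calls it (the opcode and operand choices are illustrative only):
  //
  //   X64OperandGenerator g(this);
  //   Emit(kX64Add32, g.DefineSameAsFirst(node),
  //        g.UseRegister(node->InputAt(0)), g.Use(node->InputAt(1)));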

  Instruction* Emit(InstructionCode opcode, InstructionOperand output,
                    size_t temp_count = 0, InstructionOperand* temps = nullptr);
  Instruction* Emit(InstructionCode opcode, InstructionOperand output,
                    InstructionOperand a, size_t temp_count = 0,
                    InstructionOperand* temps = nullptr);
  Instruction* Emit(InstructionCode opcode, InstructionOperand output,
                    InstructionOperand a, InstructionOperand b,
                    size_t temp_count = 0, InstructionOperand* temps = nullptr);
  Instruction* Emit(InstructionCode opcode, InstructionOperand output,
                    InstructionOperand a, InstructionOperand b,
                    InstructionOperand c, size_t temp_count = 0,
                    InstructionOperand* temps = nullptr);
  Instruction* Emit(InstructionCode opcode, InstructionOperand output,
                    InstructionOperand a, InstructionOperand b,
                    InstructionOperand c, InstructionOperand d,
                    size_t temp_count = 0, InstructionOperand* temps = nullptr);
  Instruction* Emit(InstructionCode opcode, InstructionOperand output,
                    InstructionOperand a, InstructionOperand b,
                    InstructionOperand c, InstructionOperand d,
                    InstructionOperand e, size_t temp_count = 0,
                    InstructionOperand* temps = nullptr);
  Instruction* Emit(InstructionCode opcode, InstructionOperand output,
                    InstructionOperand a, InstructionOperand b,
                    InstructionOperand c, InstructionOperand d,
                    InstructionOperand e, InstructionOperand f,
                    size_t temp_count = 0, InstructionOperand* temps = nullptr);
  Instruction* Emit(InstructionCode opcode, size_t output_count,
                    InstructionOperand* outputs, size_t input_count,
                    InstructionOperand* inputs, size_t temp_count = 0,
                    InstructionOperand* temps = nullptr);
  Instruction* Emit(Instruction* instr);

  // ===========================================================================
  // ============ Architecture-independent CPU feature methods. ===============
  // ===========================================================================

  class Features final {
   public:
    Features() : bits_(0) {}
    explicit Features(unsigned bits) : bits_(bits) {}
    explicit Features(CpuFeature f) : bits_(1u << f) {}
    Features(CpuFeature f1, CpuFeature f2) : bits_((1u << f1) | (1u << f2)) {}

    bool Contains(CpuFeature f) const { return (bits_ & (1u << f)); }

   private:
    unsigned bits_;
  };

  bool IsSupported(CpuFeature feature) const {
    return features_.Contains(feature);
  }

  // Returns the features supported on the target platform.
  static Features SupportedFeatures() {
    return Features(CpuFeatures::SupportedFeatures());
  }

  // TODO(sigurds) This should take a CpuFeatures argument.
  static MachineOperatorBuilder::Flags SupportedMachineOperatorFlags();

  // ===========================================================================
  // ============ Architecture-independent graph covering methods. ============
  // ===========================================================================

  // Used in pattern matching during code generation.
  // Checks if {node} can be covered while generating code for the current
  // instruction. A node can be covered if its only use edge comes from {user}
  // and the two are in the same basic block.
  bool CanCover(Node* user, Node* node) const;
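
  // A typical case of covering (illustrative, not specific to any backend): a
  // Load node whose only use is an input of the arithmetic node currently
  // being visited can be folded into a memory operand of the emitted
  // instruction instead of being emitted as a separate load.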

  // Checks if {node} was already defined, and therefore code was already
  // generated for it.
  bool IsDefined(Node* node) const;

  // Checks if {node} has any uses, and therefore code has to be generated for
  // it.
  bool IsUsed(Node* node) const;

  // Checks if {node} is currently live.
  bool IsLive(Node* node) const { return !IsDefined(node) && IsUsed(node); }

  int GetVirtualRegister(const Node* node);
  const std::map<NodeId, int> GetVirtualRegistersForTesting() const;

  Isolate* isolate() const { return sequence()->isolate(); }

 private:
  friend class OperandGenerator;

  void EmitTableSwitch(const SwitchInfo& sw,
                       InstructionOperand& index_operand);
  void EmitLookupSwitch(const SwitchInfo& sw,
                        InstructionOperand& value_operand);

  // Inform the instruction selection that {node} was just defined.
  void MarkAsDefined(Node* node);

  // Inform the instruction selection that {node} has at least one use and we
  // will need to generate code for it.
  void MarkAsUsed(Node* node);

  // Inform the register allocation of the representation of the value produced
  // by {node}.
  void MarkAsRepresentation(MachineRepresentation rep, Node* node);
  void MarkAsWord32(Node* node) {
    MarkAsRepresentation(MachineRepresentation::kWord32, node);
  }
  void MarkAsWord64(Node* node) {
    MarkAsRepresentation(MachineRepresentation::kWord64, node);
  }
  void MarkAsFloat32(Node* node) {
    MarkAsRepresentation(MachineRepresentation::kFloat32, node);
  }
  void MarkAsFloat64(Node* node) {
    MarkAsRepresentation(MachineRepresentation::kFloat64, node);
  }
  void MarkAsReference(Node* node) {
    MarkAsRepresentation(MachineRepresentation::kTagged, node);
  }

  // Inform the register allocation of the representation of the unallocated
  // operand {op}.
  void MarkAsRepresentation(MachineRepresentation rep,
                            const InstructionOperand& op);

  enum CallBufferFlag {
    kCallCodeImmediate = 1u << 0,
    kCallAddressImmediate = 1u << 1,
    kCallTail = 1u << 2
  };
  typedef base::Flags<CallBufferFlag> CallBufferFlags;

  // Initialize the call buffer with the InstructionOperands, nodes, etc.,
  // corresponding to the inputs and outputs of the call.
  // Pass kCallCodeImmediate in {flags} to generate immediate operands for
  // calls to code objects, and kCallAddressImmediate to generate immediate
  // operands for calls to addresses.
  void InitializeCallBuffer(Node* call, CallBuffer* buffer,
                            CallBufferFlags flags, int stack_param_delta = 0);
  bool IsTailCallAddressImmediate();

  FrameStateDescriptor* GetFrameStateDescriptor(Node* node);

  // ===========================================================================
  // ============= Architecture-specific graph covering methods. ==============
  // ===========================================================================

  // Visit nodes in the given block and generate code.
  void VisitBlock(BasicBlock* block);

  // Visit the node for the control flow at the end of the block, generating
  // code if necessary.
  void VisitControl(BasicBlock* block);

  // Visit the node and generate code, if any.
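  // In outline (inferred from the visitor declarations below, not a contract):
  // machine operators dispatch to the per-opcode Visit##x methods declared via
  // MACHINE_OP_LIST, which the architecture backends implement, while the
  // visitors that follow handle common operators such as Phi, Parameter, and
  // Call.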
  void VisitNode(Node* node);

#define DECLARE_GENERATOR(x) void Visit##x(Node* node);
  MACHINE_OP_LIST(DECLARE_GENERATOR)
#undef DECLARE_GENERATOR

  void VisitFinishRegion(Node* node);
  void VisitGuard(Node* node);
  void VisitParameter(Node* node);
  void VisitIfException(Node* node);
  void VisitOsrValue(Node* node);
  void VisitPhi(Node* node);
  void VisitProjection(Node* node);
  void VisitConstant(Node* node);
  void VisitCall(Node* call, BasicBlock* handler = nullptr);
  void VisitTailCall(Node* call);
  void VisitGoto(BasicBlock* target);
  void VisitBranch(Node* input, BasicBlock* tbranch, BasicBlock* fbranch);
  void VisitSwitch(Node* node, const SwitchInfo& sw);
  void VisitDeoptimize(DeoptimizeKind kind, Node* value);
  void VisitReturn(Node* ret);
  void VisitThrow(Node* value);

  void EmitPrepareArguments(ZoneVector<compiler::PushParameter>* arguments,
                            const CallDescriptor* descriptor, Node* node);

  // ===========================================================================

  Schedule* schedule() const { return schedule_; }
  Linkage* linkage() const { return linkage_; }
  InstructionSequence* sequence() const { return sequence_; }
  Zone* instruction_zone() const { return sequence()->zone(); }
  Zone* zone() const { return zone_; }

  // ===========================================================================

  Zone* const zone_;
  Linkage* const linkage_;
  InstructionSequence* const sequence_;
  SourcePositionTable* const source_positions_;
  SourcePositionMode const source_position_mode_;
  Features features_;
  Schedule* const schedule_;
  BasicBlock* current_block_;
  ZoneVector<Instruction*> instructions_;
  BoolVector defined_;
  BoolVector used_;
  IntVector virtual_registers_;
  InstructionScheduler* scheduler_;
};

}  // namespace compiler
}  // namespace internal
}  // namespace v8

#endif  // V8_COMPILER_INSTRUCTION_SELECTOR_H_