// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_MIPS_LITHIUM_CODEGEN_MIPS_H_
#define V8_MIPS_LITHIUM_CODEGEN_MIPS_H_

#include "deoptimizer.h"
#include "mips/lithium-gap-resolver-mips.h"
#include "mips/lithium-mips.h"
#include "safepoint-table.h"
#include "scopes.h"
#include "v8utils.h"

namespace v8 {
namespace internal {

// Forward declarations.
class LDeferredCode;
class SafepointGenerator;

// Generates MIPS machine code from a chunk of lithium instructions via the
// supplied MacroAssembler, while collecting the side tables the runtime
// needs: a safepoint table (safepoints_) and deoptimization bookkeeping
// (translations, jump-table entries, literals).  Definitions live in the
// matching lithium-codegen-mips.cc.
class LCodeGen BASE_EMBEDDED {
 public:
  // All pointer arguments are borrowed; lists are allocated in the
  // CompilationInfo's zone.  Eagerly records the literals of any inlined
  // functions so translation records can reference them.
  LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
      : zone_(info->zone()),
        chunk_(static_cast<LPlatformChunk*>(chunk)),
        masm_(assembler),
        info_(info),
        current_block_(-1),
        current_instruction_(-1),
        instructions_(chunk->instructions()),
        deoptimizations_(4, info->zone()),
        deopt_jump_table_(4, info->zone()),
        deoptimization_literals_(8, info->zone()),
        inlined_function_count_(0),
        scope_(info->scope()),
        status_(UNUSED),
        translations_(info->zone()),
        deferred_(8, info->zone()),
        osr_pc_offset_(-1),
        last_lazy_deopt_pc_(0),
        frame_is_built_(false),
        safepoints_(info->zone()),
        resolver_(this),
        expected_safepoint_kind_(Safepoint::kSimple),
        old_position_(RelocInfo::kNoPosition) {
    PopulateDeoptimizationLiteralsWithInlinedFunctions();
  }


  // Simple accessors.
  MacroAssembler* masm() const { return masm_; }
  CompilationInfo* info() const { return info_; }
  Isolate* isolate() const { return info_->isolate(); }
  Factory* factory() const { return isolate()->factory(); }
  Heap* heap() const { return isolate()->heap(); }
  Zone* zone() const { return zone_; }

  int LookupDestination(int block_id) const {
    return chunk()->LookupDestination(block_id);
  }

  bool IsNextEmittedBlock(int block_id) const {
    return LookupDestination(block_id) == GetNextEmittedBlock();
  }

  // A frame must be set up in the prologue when the code spills to stack
  // slots, makes non-deferred calls, is not a stub, or explicitly
  // requires one.
  bool NeedsEagerFrame() const {
    return GetStackSlotCount() > 0 ||
        info()->is_non_deferred_calling() ||
        !info()->IsStub() ||
        info()->requires_frame();
  }
  // Otherwise a frame may still be built lazily for deferred calls.
  bool NeedsDeferredFrame() const {
    return !NeedsEagerFrame() && info()->is_deferred_calling();
  }

  // ra is saved as part of frame construction, so its status follows
  // frame_is_built_.
  RAStatus GetRAState() const {
    return frame_is_built_ ? kRAHasBeenSaved : kRAHasNotBeenSaved;
  }

  // Support for converting LOperands to assembler types.
  // LOperand must be a register.
  Register ToRegister(LOperand* op) const;

  // LOperand is loaded into scratch, unless already a register.
  Register EmitLoadRegister(LOperand* op, Register scratch);

  // LOperand must be a double register.
  DoubleRegister ToDoubleRegister(LOperand* op) const;

  // LOperand is loaded into dbl_scratch, unless already a double register.
  DoubleRegister EmitLoadDoubleRegister(LOperand* op,
                                        FloatRegister flt_scratch,
                                        DoubleRegister dbl_scratch);
  int32_t ToRepresentation(LConstantOperand* op, const Representation& r) const;
  int32_t ToInteger32(LConstantOperand* op) const;
  Smi* ToSmi(LConstantOperand* op) const;
  double ToDouble(LConstantOperand* op) const;
  Operand ToOperand(LOperand* op);
  MemOperand ToMemOperand(LOperand* op) const;
  // Returns a MemOperand pointing to the high word of a DoubleStackSlot.
  MemOperand ToHighMemOperand(LOperand* op) const;

  bool IsInteger32(LConstantOperand* op) const;
  bool IsSmi(LConstantOperand* op) const;
  Handle<Object> ToHandle(LConstantOperand* op) const;

  // Try to generate code for the entire chunk, but it may fail if the
  // chunk contains constructs we cannot handle. Returns true if the
  // code generation attempt succeeded.
  bool GenerateCode();

  // Finish the code by setting stack height, safepoint, and bailout
  // information on it.
  void FinishCode(Handle<Code> code);

  // Out-of-line continuations for instructions whose slow path is emitted
  // as deferred code (see LDeferredCode below).
  void DoDeferredNumberTagD(LNumberTagD* instr);

  enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 };
  void DoDeferredNumberTagI(LInstruction* instr,
                            LOperand* value,
                            IntegerSignedness signedness);

  void DoDeferredTaggedToI(LTaggedToI* instr);
  void DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr);
  void DoDeferredStackCheck(LStackCheck* instr);
  void DoDeferredRandom(LRandom* instr);
  void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
  void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
  void DoDeferredAllocate(LAllocate* instr);
  void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
                                       Label* map_check);

  void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);

  // Parallel move support.
  void DoParallelMove(LParallelMove* move);
  void DoGap(LGap* instr);

  // Computes the address of a keyed element, either from a constant key or
  // from a key register scaled/offset as described by the arguments.
  MemOperand PrepareKeyedOperand(Register key,
                                 Register base,
                                 bool key_is_constant,
                                 int constant_key,
                                 int element_size,
                                 int shift_size,
                                 int additional_index,
                                 int additional_offset);

  // Emit frame translation commands for an environment.
  void WriteTranslation(LEnvironment* environment, Translation* translation);

  // Declare methods that deal with the individual node types.
#define DECLARE_DO(type) void Do##type(L##type* node);
  LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
#undef DECLARE_DO

 private:
  // Codegen lifecycle; queried through the is_*() predicates below.
  enum Status {
    UNUSED,
    GENERATING,
    DONE,
    ABORTED
  };

  bool is_unused() const { return status_ == UNUSED; }
  bool is_generating() const { return status_ == GENERATING; }
  bool is_done() const { return status_ == DONE; }
  bool is_aborted() const { return status_ == ABORTED; }

  StrictModeFlag strict_mode_flag() const {
    return info()->is_classic_mode() ? kNonStrictMode : kStrictMode;
  }

  LPlatformChunk* chunk() const { return chunk_; }
  Scope* scope() const { return scope_; }
  HGraph* graph() const { return chunk()->graph(); }

  // Fixed scratch registers reserved for lithium codegen on MIPS.
  Register scratch0() { return kLithiumScratchReg; }
  Register scratch1() { return kLithiumScratchReg2; }
  DoubleRegister double_scratch0() { return kLithiumScratchDouble; }

  int GetNextEmittedBlock() const;
  LInstruction* GetNextInstruction();

  void EmitClassOfTest(Label* if_true,
                       Label* if_false,
                       Handle<String> class_name,
                       Register input,
                       Register temporary,
                       Register temporary2);

  int GetStackSlotCount() const { return chunk()->spill_slot_count(); }

  void Abort(BailoutReason reason);
  // printf-style comment emitted into the generated code stream.
  void FPRINTF_CHECKING Comment(const char* format, ...);

  // Registers a deferred-code section for emission after the main body.
  void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }

  // Code generation passes. Returns true if code generation should
  // continue.
  bool GeneratePrologue();
  bool GenerateBody();
  bool GenerateDeferredCode();
  bool GenerateDeoptJumpTable();
  bool GenerateSafepointTable();

  enum SafepointMode {
    RECORD_SIMPLE_SAFEPOINT,
    RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
  };

  void CallCode(Handle<Code> code,
                RelocInfo::Mode mode,
                LInstruction* instr);

  void CallCodeGeneric(Handle<Code> code,
                       RelocInfo::Mode mode,
                       LInstruction* instr,
                       SafepointMode safepoint_mode);

  void CallRuntime(const Runtime::Function* function,
                   int num_arguments,
                   LInstruction* instr);

  // Convenience overload: resolves the runtime function from its id.
  void CallRuntime(Runtime::FunctionId id,
                   int num_arguments,
                   LInstruction* instr) {
    const Runtime::Function* function = Runtime::FunctionForId(id);
    CallRuntime(function, num_arguments, instr);
  }

  void CallRuntimeFromDeferred(Runtime::FunctionId id,
                               int argc,
                               LInstruction* instr);

  // Whether the a1 register already holds the call target when
  // CallKnownFunction is invoked.
  enum A1State {
    A1_UNINITIALIZED,
    A1_CONTAINS_TARGET
  };

  // Generate a direct call to a known function. Expects the function
  // to be in a1.
  void CallKnownFunction(Handle<JSFunction> function,
                         int formal_parameter_count,
                         int arity,
                         LInstruction* instr,
                         CallKind call_kind,
                         A1State a1_state);

  void LoadHeapObject(Register result, Handle<HeapObject> object);

  void RecordSafepointWithLazyDeopt(LInstruction* instr,
                                    SafepointMode safepoint_mode);

  void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
                                            Safepoint::DeoptMode mode);
  // Deoptimization checks.  src1/src2 default to comparisons against the
  // zero register; the overload without a BailoutType presumably picks the
  // default type — see the definitions in lithium-codegen-mips.cc.
  void DeoptimizeIf(Condition condition,
                    LEnvironment* environment,
                    Deoptimizer::BailoutType bailout_type,
                    Register src1 = zero_reg,
                    const Operand& src2 = Operand(zero_reg));
  void DeoptimizeIf(Condition condition,
                    LEnvironment* environment,
                    Register src1 = zero_reg,
                    const Operand& src2 = Operand(zero_reg));
  void ApplyCheckIf(Condition condition,
                    LBoundsCheck* check,
                    Register src1 = zero_reg,
                    const Operand& src2 = Operand(zero_reg));

  void AddToTranslation(LEnvironment* environment,
                        Translation* translation,
                        LOperand* op,
                        bool is_tagged,
                        bool is_uint32,
                        int* object_index_pointer,
                        int* dematerialized_index_pointer);
  void RegisterDependentCodeForEmbeddedMaps(Handle<Code> code);
  void PopulateDeoptimizationData(Handle<Code> code);
  int DefineDeoptimizationLiteral(Handle<Object> literal);

  void PopulateDeoptimizationLiteralsWithInlinedFunctions();

  // Index-based register lookup (allocator register codes).
  Register ToRegister(int index) const;
  DoubleRegister ToDoubleRegister(int index) const;

  void EmitIntegerMathAbs(LMathAbs* instr);

  // Support for recording safepoint and position information.
  void RecordSafepoint(LPointerMap* pointers,
                       Safepoint::Kind kind,
                       int arguments,
                       Safepoint::DeoptMode mode);
  void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
  void RecordSafepoint(Safepoint::DeoptMode mode);
  void RecordSafepointWithRegisters(LPointerMap* pointers,
                                    int arguments,
                                    Safepoint::DeoptMode mode);
  void RecordSafepointWithRegistersAndDoubles(LPointerMap* pointers,
                                              int arguments,
                                              Safepoint::DeoptMode mode);
  void RecordPosition(int position);
  void RecordAndUpdatePosition(int position);

  static Condition TokenToCondition(Token::Value op, bool is_unsigned);
  void EmitGoto(int block);
  // Branch emission templated on the instruction type so the true/false
  // block ids can be read from the concrete instruction.
  template<class InstrType>
  void EmitBranch(InstrType instr,
                  Condition condition,
                  Register src1,
                  const Operand& src2);
  template<class InstrType>
  void EmitBranchF(InstrType instr,
                   Condition condition,
                   FPURegister src1,
                   FPURegister src2);
  template<class InstrType>
  void EmitFalseBranchF(InstrType instr,
                        Condition condition,
                        FPURegister src1,
                        FPURegister src2);
  void EmitCmpI(LOperand* left, LOperand* right);
  void EmitNumberUntagD(Register input,
                        DoubleRegister result,
                        bool allow_undefined_as_nan,
                        bool deoptimize_on_minus_zero,
                        LEnvironment* env,
                        NumberUntagDMode mode);

  // Emits optimized code for typeof x == "y". Modifies input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  // Returns two registers in cmp1 and cmp2 that can be used in the
  // Branch instruction after EmitTypeofIs.
  Condition EmitTypeofIs(Label* true_label,
                         Label* false_label,
                         Register input,
                         Handle<String> type_name,
                         Register& cmp1,
                         Operand& cmp2);

  // Emits optimized code for %_IsObject(x). Preserves input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitIsObject(Register input,
                         Register temp1,
                         Register temp2,
                         Label* is_not_object,
                         Label* is_object);

  // Emits optimized code for %_IsString(x). Preserves input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitIsString(Register input,
                         Register temp1,
                         Label* is_not_string,
                         SmiCheck check_needed);

  // Emits optimized code for %_IsConstructCall().
  // Caller should branch on equal condition.
  void EmitIsConstructCall(Register temp1, Register temp2);

  // Emits optimized code to deep-copy the contents of statically known
  // object graphs (e.g. object literal boilerplate).
  void EmitDeepCopy(Handle<JSObject> object,
                    Register result,
                    Register source,
                    int* offset,
                    AllocationSiteMode mode);

  // Emit optimized code for integer division.
  // Inputs are signed.
  // All registers are clobbered.
  // If 'remainder' is no_reg, it is not computed.
  void EmitSignedIntegerDivisionByConstant(Register result,
                                           Register dividend,
                                           int32_t divisor,
                                           Register remainder,
                                           Register scratch,
                                           LEnvironment* environment);


  void EnsureSpaceForLazyDeopt();
  // Per-backing-store variants dispatched to by DoLoadKeyed/DoStoreKeyed.
  void DoLoadKeyedExternalArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedArray(LLoadKeyed* instr);
  void DoStoreKeyedExternalArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedArray(LStoreKeyed* instr);

  // Borrowed context for the whole compilation (set in the constructor).
  Zone* zone_;
  LPlatformChunk* const chunk_;
  MacroAssembler* const masm_;
  CompilationInfo* const info_;

  // Emission cursor within the chunk; both start at -1 (see constructor).
  int current_block_;
  int current_instruction_;
  const ZoneList<LInstruction*>* instructions_;
  // Deoptimization bookkeeping: registered environments, their jump-table
  // entries, and the literals referenced by translation records.
  ZoneList<LEnvironment*> deoptimizations_;
  ZoneList<Deoptimizer::JumpTableEntry> deopt_jump_table_;
  ZoneList<Handle<Object> > deoptimization_literals_;
  int inlined_function_count_;
  Scope* const scope_;
  Status status_;
  TranslationBuffer translations_;
  // Deferred-code sections registered via AddDeferredCode().
  ZoneList<LDeferredCode*> deferred_;
  int osr_pc_offset_;
  int last_lazy_deopt_pc_;
  // Set once the stack frame exists; consulted by GetRAState().
  bool frame_is_built_;

  // Builder that keeps track of safepoints in the code. The table
  // itself is emitted at the end of the generated code.
  SafepointTableBuilder safepoints_;

  // Compiler from a set of parallel moves to a sequential list of moves.
  LGapResolver resolver_;

  Safepoint::Kind expected_safepoint_kind_;

  // Previously recorded source position; RelocInfo::kNoPosition until one
  // has been recorded.
  int old_position_;

  // RAII helper: pushes the safepoint registers (optionally including
  // doubles) on construction and records the expected safepoint kind;
  // pops them and restores the kind to kSimple on destruction.  Only
  // valid while the codegen expects simple safepoints.
  class PushSafepointRegistersScope BASE_EMBEDDED {
   public:
    PushSafepointRegistersScope(LCodeGen* codegen,
                                Safepoint::Kind kind)
        : codegen_(codegen) {
      ASSERT(codegen_->info()->is_calling());
      ASSERT(codegen_->expected_safepoint_kind_ == Safepoint::kSimple);
      codegen_->expected_safepoint_kind_ = kind;

      switch (codegen_->expected_safepoint_kind_) {
        case Safepoint::kWithRegisters:
          codegen_->masm_->PushSafepointRegisters();
          break;
        case Safepoint::kWithRegistersAndDoubles:
          codegen_->masm_->PushSafepointRegistersAndDoubles();
          break;
        default:
          UNREACHABLE();
      }
    }

    ~PushSafepointRegistersScope() {
      Safepoint::Kind kind = codegen_->expected_safepoint_kind_;
      // Must mirror the push performed in the constructor.
      ASSERT((kind & Safepoint::kWithRegisters) != 0);
      switch (kind) {
        case Safepoint::kWithRegisters:
          codegen_->masm_->PopSafepointRegisters();
          break;
        case Safepoint::kWithRegistersAndDoubles:
          codegen_->masm_->PopSafepointRegistersAndDoubles();
          break;
        default:
          UNREACHABLE();
      }
      codegen_->expected_safepoint_kind_ = Safepoint::kSimple;
    }

   private:
    LCodeGen* codegen_;
  };

  friend class LDeferredCode;
  friend class LEnvironment;
  friend class SafepointGenerator;
  DISALLOW_COPY_AND_ASSIGN(LCodeGen);
};


// Base class for a section of code emitted out of line from the main
// instruction stream.  Constructing an instance registers it with the
// code generator (AddDeferredCode); subclasses implement Generate() to
// emit the deferred code.  entry()/exit() supply the labels used to jump
// into and back out of the section; SetExit() substitutes an external
// exit label for the default one.
class LDeferredCode: public ZoneObject {
 public:
  explicit LDeferredCode(LCodeGen* codegen)
      : codegen_(codegen),
        external_exit_(NULL),
        instruction_index_(codegen->current_instruction_) {
    codegen->AddDeferredCode(this);
  }

  virtual ~LDeferredCode() { }
  virtual void Generate() = 0;
  virtual LInstruction* instr() = 0;

  void SetExit(Label* exit) { external_exit_ = exit; }
  Label* entry() { return &entry_; }
  Label* exit() { return external_exit_ != NULL ? external_exit_ : &exit_; }
  // Index of the instruction this deferred code belongs to, captured at
  // construction time.
  int instruction_index() const { return instruction_index_; }

 protected:
  LCodeGen* codegen() const { return codegen_; }
  MacroAssembler* masm() const { return codegen_->masm(); }

 private:
  LCodeGen* codegen_;
  Label entry_;
  Label exit_;
  Label* external_exit_;
  int instruction_index_;
};

} }  // namespace v8::internal

#endif  // V8_MIPS_LITHIUM_CODEGEN_MIPS_H_