// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_X64_CODEGEN_X64_H_
#define V8_X64_CODEGEN_X64_H_

namespace v8 {
namespace internal {

// Forward declarations
class CompilationInfo;
class DeferredCode;
class RegisterAllocator;
class RegisterFile;

enum InitState { CONST_INIT, NOT_CONST_INIT };
enum TypeofState { INSIDE_TYPEOF, NOT_INSIDE_TYPEOF };


// -------------------------------------------------------------------------
// Reference support

// A reference is a C++ stack-allocated object that puts a
// reference on the virtual frame. The reference may be consumed
// by GetValue, TakeValue, SetValue, and CodeGenerator::UnloadReference.
// When the lifetime (scope) of a valid reference ends, it must have
// been consumed, and be in state UNLOADED.
class Reference BASE_EMBEDDED {
 public:
  // The values of the types are important, see size().
  enum Type { UNLOADED = -2, ILLEGAL = -1, SLOT = 0, NAMED = 1, KEYED = 2 };

  Reference(CodeGenerator* cgen,
            Expression* expression,
            bool persist_after_get = false);
  ~Reference();

  Expression* expression() const { return expression_; }
  Type type() const { return type_; }
  void set_type(Type value) {
    ASSERT_EQ(ILLEGAL, type_);
    type_ = value;
  }

  void set_unloaded() {
    ASSERT_NE(ILLEGAL, type_);
    ASSERT_NE(UNLOADED, type_);
    type_ = UNLOADED;
  }

  // The size the reference takes up on the stack.
  int size() const {
    return (type_ < SLOT) ? 0 : type_;
  }

  bool is_illegal() const { return type_ == ILLEGAL; }
  bool is_slot() const { return type_ == SLOT; }
  bool is_property() const { return type_ == NAMED || type_ == KEYED; }
  bool is_unloaded() const { return type_ == UNLOADED; }

  // Return the name. Only valid for named property references.
  Handle<String> GetName();

  // Generate code to push the value of the reference on top of the
  // expression stack. The reference is expected to be already on top
  // of the expression stack, and it is consumed by the call unless the
  // reference is for a compound assignment.
  // If the reference is not consumed, it is left in place under its value.
  void GetValue();

  // Like GetValue except that the slot is expected to be written to before
  // being read from again. The value of the reference may be invalidated,
  // causing subsequent attempts to read it to fail.
  void TakeValue();

  // Generate code to store the value on top of the expression stack in the
  // reference. The reference is expected to be immediately below the value
  // on the expression stack. The value is stored in the location specified
  // by the reference, and is left on top of the stack, after the reference
  // is popped from beneath it (unloaded).
  void SetValue(InitState init_state);

 private:
  CodeGenerator* cgen_;
  Expression* expression_;
  Type type_;
  bool persist_after_get_;
};
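
// Illustrative only: a minimal sketch of the intended lifecycle, assuming a
// code generator `cgen` and an expression `expr` already exist (both names
// are hypothetical). A valid reference must be consumed before it goes out
// of scope:
//
//   {
//     Reference ref(cgen, expr);   // starts out ILLEGAL
//     cgen->LoadReference(&ref);   // pushes 0-2 words; type becomes
//                                  // SLOT, NAMED, or KEYED
//     ref.GetValue();              // consumes the reference, leaving
//                                  // its value on the expression stack
//   }                              // ~Reference() expects UNLOADED here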

// -------------------------------------------------------------------------
// Control destinations.

// A control destination encapsulates a pair of jump targets and a
// flag indicating which one is the preferred fall-through. The
// preferred fall-through must be unbound, the other may be already
// bound (ie, a backward target).
//
// The true and false targets may be jumped to unconditionally or
// control may split conditionally. Unconditional jumping and
// splitting should be emitted in tail position (as the last thing
// when compiling an expression) because they can cause either label
// to be bound or the non-fall through to be jumped to, leaving an
// invalid virtual frame.
//
// The labels in the control destination can be extracted and
// manipulated normally without affecting the state of the
// destination.

class ControlDestination BASE_EMBEDDED {
 public:
  ControlDestination(JumpTarget* true_target,
                     JumpTarget* false_target,
                     bool true_is_fall_through)
      : true_target_(true_target),
        false_target_(false_target),
        true_is_fall_through_(true_is_fall_through),
        is_used_(false) {
    ASSERT(true_is_fall_through ? !true_target->is_bound()
                                : !false_target->is_bound());
  }

  // Accessors for the jump targets. Directly jumping or branching to
  // or binding the targets will not update the destination's state.
  JumpTarget* true_target() const { return true_target_; }
  JumpTarget* false_target() const { return false_target_; }

  // True if the destination has been jumped to unconditionally or
  // control has been split to both targets. This predicate does not
  // test whether the targets have been extracted and manipulated as
  // raw jump targets.
  bool is_used() const { return is_used_; }

  // True if the destination is used and the true target (respectively
  // false target) was the fall through. If the target is backward,
  // "fall through" included jumping unconditionally to it.
  bool true_was_fall_through() const {
    return is_used_ && true_is_fall_through_;
  }

  bool false_was_fall_through() const {
    return is_used_ && !true_is_fall_through_;
  }

  // Emit a branch to one of the true or false targets, and bind the
  // other target. Because this binds the fall-through target, it
  // should be emitted in tail position (as the last thing when
  // compiling an expression).
  void Split(Condition cc) {
    ASSERT(!is_used_);
    if (true_is_fall_through_) {
      false_target_->Branch(NegateCondition(cc));
      true_target_->Bind();
    } else {
      true_target_->Branch(cc);
      false_target_->Bind();
    }
    is_used_ = true;
  }

  // Emit an unconditional jump in tail position, to the true target
  // (if the argument is true) or the false target. The "jump" will
  // actually bind the jump target if it is forward, jump to it if it
  // is backward.
  void Goto(bool where) {
    ASSERT(!is_used_);
    JumpTarget* target = where ? true_target_ : false_target_;
    if (target->is_bound()) {
      target->Jump();
    } else {
      target->Bind();
    }
    is_used_ = true;
    true_is_fall_through_ = where;
  }

  // Mark this destination as used as if Goto had been called, but
  // without generating a jump or binding a label (the control effect
  // should have already happened). This is used when the left
  // subexpression of a short-circuit boolean operator is compiled.
  void Use(bool where) {
    ASSERT(!is_used_);
    ASSERT((where ? true_target_ : false_target_)->is_bound());
    is_used_ = true;
    true_is_fall_through_ = where;
  }

  // Swap the true and false targets but keep the same actual label as
  // the fall through. This is used when compiling negated
  // expressions, where we want to swap the targets but preserve the
  // state.
  void Invert() {
    JumpTarget* temp_target = true_target_;
    true_target_ = false_target_;
    false_target_ = temp_target;

    true_is_fall_through_ = !true_is_fall_through_;
  }

 private:
  // True and false jump targets.
  JumpTarget* true_target_;
  JumpTarget* false_target_;

  // Before using the destination: true if the true target is the
  // preferred fall through, false if the false target is. After
  // using the destination: true if the true target was actually used
  // as the fall through, false if the false target was.
  bool true_is_fall_through_;

  // True if the Split or Goto functions have been called.
  bool is_used_;
};
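
// Illustrative only: a hedged sketch of how a destination is typically
// consumed when compiling a comparison in tail position (the jump targets
// and the instruction that sets the condition codes are assumed to be set
// up by the caller):
//
//   JumpTarget true_target;
//   JumpTarget false_target;
//   ControlDestination dest(&true_target, &false_target, true);
//   // ... emit a comparison that sets the condition codes ...
//   dest.Split(less);  // branches to false_target on !less and binds
//                      // true_target as the fall-through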

// -------------------------------------------------------------------------
// Code generation state

// The state is passed down the AST by the code generator (and back up, in
// the form of the state of the jump target pair). It is threaded through
// the call stack. Constructing a state implicitly pushes it on the owning
// code generator's stack of states, and destroying one implicitly pops it.
//
// The code generator state is only used for expressions, so statements have
// the initial state.

class CodeGenState BASE_EMBEDDED {
 public:
  // Create an initial code generator state. Destroying the initial state
  // leaves the code generator with a NULL state.
  explicit CodeGenState(CodeGenerator* owner);

  // Create a code generator state based on a code generator's current
  // state. The new state has its own control destination.
  CodeGenState(CodeGenerator* owner, ControlDestination* destination);

  // Destroy a code generator state and restore the owning code generator's
  // previous state.
  ~CodeGenState();

  // Accessors for the state.
  ControlDestination* destination() const { return destination_; }

 private:
  // The owning code generator.
  CodeGenerator* owner_;

  // A control destination in case the expression has a control-flow
  // effect.
  ControlDestination* destination_;

  // The previous state of the owning code generator, restored when
  // this state is destroyed.
  CodeGenState* previous_;
};
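
// Illustrative only: the state is intended to be managed RAII-style from a
// visitor. A hedged sketch, assuming a hypothetical visit method and an
// already-constructed destination:
//
//   void CodeGenerator::VisitSomeExpression(SomeExpression* node) {
//     ControlDestination dest(&true_target, &false_target, true);
//     CodeGenState new_state(this, &dest);  // implicitly pushed on owner
//     // ... compile subexpressions; they see destination() == &dest ...
//   }  // ~CodeGenState() restores the previous state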

// -------------------------------------------------------------------------
// Arguments allocation mode

enum ArgumentsAllocationMode {
  NO_ARGUMENTS_ALLOCATION,
  EAGER_ARGUMENTS_ALLOCATION,
  LAZY_ARGUMENTS_ALLOCATION
};


// -------------------------------------------------------------------------
// CodeGenerator

class CodeGenerator: public AstVisitor {
 public:
  // Takes a function literal, generates code for it. This function should
  // only be called by compiler.cc.
  static Handle<Code> MakeCode(CompilationInfo* info);

  // Printing of AST, etc. as requested by flags.
  static void MakeCodePrologue(CompilationInfo* info);

  // Allocate and install the code.
  static Handle<Code> MakeCodeEpilogue(MacroAssembler* masm,
                                       Code::Flags flags,
                                       CompilationInfo* info);

#ifdef ENABLE_LOGGING_AND_PROFILING
  static bool ShouldGenerateLog(Expression* type);
#endif

  static void RecordPositions(MacroAssembler* masm, int pos);

  // Accessors
  MacroAssembler* masm() { return masm_; }
  VirtualFrame* frame() const { return frame_; }
  inline Handle<Script> script();

  bool has_valid_frame() const { return frame_ != NULL; }

  // Set the virtual frame to be new_frame, with non-frame register
  // reference counts given by non_frame_registers. The non-frame
  // register reference counts of the old frame are returned in
  // non_frame_registers.
  void SetFrame(VirtualFrame* new_frame, RegisterFile* non_frame_registers);

  void DeleteFrame();

  RegisterAllocator* allocator() const { return allocator_; }

  CodeGenState* state() { return state_; }
  void set_state(CodeGenState* state) { state_ = state; }

  void AddDeferred(DeferredCode* code) { deferred_.Add(code); }

  bool in_spilled_code() const { return in_spilled_code_; }
  void set_in_spilled_code(bool flag) { in_spilled_code_ = flag; }

 private:
  // Construction/Destruction
  explicit CodeGenerator(MacroAssembler* masm);

  // Accessors
  inline bool is_eval();
  Scope* scope();

  // Generating deferred code.
  void ProcessDeferred();

  // State
  ControlDestination* destination() const { return state_->destination(); }

  // Track loop nesting level.
  int loop_nesting() const { return loop_nesting_; }
  void IncrementLoopNesting() { loop_nesting_++; }
  void DecrementLoopNesting() { loop_nesting_--; }


  // Node visitors.
  void VisitStatements(ZoneList<Statement*>* statements);

#define DEF_VISIT(type) \
  void Visit##type(type* node);
  AST_NODE_LIST(DEF_VISIT)
#undef DEF_VISIT

  // Visit a statement and then spill the virtual frame if control flow can
  // reach the end of the statement (ie, it does not exit via break,
  // continue, return, or throw). This function is used temporarily while
  // the code generator is being transformed.
  void VisitAndSpill(Statement* statement);

  // Visit a list of statements and then spill the virtual frame if control
  // flow can reach the end of the list.
  void VisitStatementsAndSpill(ZoneList<Statement*>* statements);

  // Main code generation function
  void Generate(CompilationInfo* info);

  // Generate the return sequence code. Should be called no more than
  // once per compiled function, immediately after binding the return
  // target (which cannot be done more than once).
  void GenerateReturnSequence(Result* return_value);

  // Returns the arguments allocation mode.
  ArgumentsAllocationMode ArgumentsMode();

  // Store the arguments object and allocate it if necessary.
  Result StoreArgumentsObject(bool initial);

  // The following are used by class Reference.
  void LoadReference(Reference* ref);
  void UnloadReference(Reference* ref);

  static Operand ContextOperand(Register context, int index) {
    return Operand(context, Context::SlotOffset(index));
  }

  Operand SlotOperand(Slot* slot, Register tmp);

  Operand ContextSlotOperandCheckExtensions(Slot* slot,
                                            Result tmp,
                                            JumpTarget* slow);

  // Expressions
  static Operand GlobalObject() {
    return ContextOperand(rsi, Context::GLOBAL_INDEX);
  }
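  // Illustrative only: both helpers just build memory operands relative to
  // a context register. A hedged sketch of loading the global object via
  // the current context in rsi (the surrounding masm_ is assumed):
  //
  //   masm_->movq(rax, GlobalObject());
  //   // equivalent to movq(rax, ContextOperand(rsi, Context::GLOBAL_INDEX))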
  void LoadCondition(Expression* x,
                     ControlDestination* destination,
                     bool force_control);
  void Load(Expression* expr);
  void LoadGlobal();
  void LoadGlobalReceiver();

  // Generate code to push the value of an expression on top of the frame
  // and then spill the frame fully to memory. This function is used
  // temporarily while the code generator is being transformed.
  void LoadAndSpill(Expression* expression);

  // Read a value from a slot and leave it on top of the expression stack.
  void LoadFromSlot(Slot* slot, TypeofState typeof_state);
  void LoadFromSlotCheckForArguments(Slot* slot, TypeofState state);
  Result LoadFromGlobalSlotCheckExtensions(Slot* slot,
                                           TypeofState typeof_state,
                                           JumpTarget* slow);

  // Store the value on top of the expression stack into a slot, leaving the
  // value in place.
  void StoreToSlot(Slot* slot, InitState init_state);

  // Load a property of an object, returning it in a Result.
  // The object and the property name are passed on the stack, and
  // not changed.
  Result EmitKeyedLoad(bool is_global);

  // Special code for typeof expressions: Unfortunately, we must
  // be careful when loading the expression in 'typeof'
  // expressions. We are not allowed to throw reference errors for
  // non-existing properties of the global object, so we must make it
  // look like an explicit property access, instead of an access
  // through the context chain.
  void LoadTypeofExpression(Expression* x);

  // Translate the value on top of the frame into control flow to the
  // control destination.
  void ToBoolean(ControlDestination* destination);

  void GenericBinaryOperation(
      Token::Value op,
      StaticType* type,
      OverwriteMode overwrite_mode);

  // If possible, combine two constant smi values using op to produce
  // a smi result, and push it on the virtual frame, all at compile time.
  // Returns true if it succeeds. Otherwise it has no effect.
  bool FoldConstantSmis(Token::Value op, int left, int right);

  // Emit code to perform a binary operation on a constant
  // smi and a likely smi. Consumes the Result *operand.
  Result ConstantSmiBinaryOperation(Token::Value op,
                                    Result* operand,
                                    Handle<Object> constant_operand,
                                    StaticType* type,
                                    bool reversed,
                                    OverwriteMode overwrite_mode);

  // Emit code to perform a binary operation on two likely smis.
  // The code to handle smi arguments is produced inline.
  // Consumes the Results *left and *right.
  Result LikelySmiBinaryOperation(Token::Value op,
                                  Result* left,
                                  Result* right,
                                  OverwriteMode overwrite_mode);

  void Comparison(AstNode* node,
                  Condition cc,
                  bool strict,
                  ControlDestination* destination);

  // To prevent long attacker-controlled byte sequences, integer constants
  // from the JavaScript source are loaded in two parts if they are larger
  // than 16 bits.
  static const int kMaxSmiInlinedBits = 16;
  bool IsUnsafeSmi(Handle<Object> value);
  // Load an integer constant x into a register target using
  // at most 16 bits of user-controlled data per assembly operation.
  void LoadUnsafeSmi(Register target, Handle<Object> value);
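  // Illustrative only: one plausible shape of such a two-part load, so that
  // no single instruction embeds more than 16 attacker-chosen bits. The
  // actual sequence is an implementation detail of the .cc file; this is a
  // hedged sketch for a value fitting in 32 bits:
  //
  //   masm_->movl(target, Immediate(value >> 16));    // upper half
  //   masm_->shll(target, Immediate(16));
  //   masm_->orl(target, Immediate(value & 0xFFFF));  // lower half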
  void CallWithArguments(ZoneList<Expression*>* arguments,
                         CallFunctionFlags flags,
                         int position);

  // An optimized implementation of expressions of the form
  // x.apply(y, arguments). We call x the applicand and y the receiver.
  // The optimization avoids allocating an arguments object if possible.
  void CallApplyLazy(Expression* applicand,
                     Expression* receiver,
                     VariableProxy* arguments,
                     int position);

  void CheckStack();

  struct InlineRuntimeLUT {
    void (CodeGenerator::*method)(ZoneList<Expression*>*);
    const char* name;
  };
  static InlineRuntimeLUT* FindInlineRuntimeLUT(Handle<String> name);
  bool CheckForInlineRuntimeCall(CallRuntime* node);
  static bool PatchInlineRuntimeEntry(Handle<String> name,
                                      const InlineRuntimeLUT& new_entry,
                                      InlineRuntimeLUT* old_entry);
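  // Illustrative only: each table entry pairs a generator method with the
  // name used at the call site. A hypothetical entry might look like
  //
  //   { &CodeGenerator::GenerateIsSmi, "_IsSmi" }
  //
  // so that a matching CallRuntime node is compiled inline instead of
  // calling into the runtime.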
  void ProcessDeclarations(ZoneList<Declaration*>* declarations);

  static Handle<Code> ComputeCallInitialize(int argc, InLoopFlag in_loop);

  // Declare global variables and functions in the given array of
  // name/value pairs.
  void DeclareGlobals(Handle<FixedArray> pairs);

  // Instantiate the function boilerplate.
  void InstantiateBoilerplate(Handle<JSFunction> boilerplate);

  // Support for type checks.
  void GenerateIsSmi(ZoneList<Expression*>* args);
  void GenerateIsNonNegativeSmi(ZoneList<Expression*>* args);
  void GenerateIsArray(ZoneList<Expression*>* args);
  void GenerateIsRegExp(ZoneList<Expression*>* args);
  void GenerateIsObject(ZoneList<Expression*>* args);
  void GenerateIsFunction(ZoneList<Expression*>* args);
  void GenerateIsUndetectableObject(ZoneList<Expression*>* args);

  // Support for construct call checks.
  void GenerateIsConstructCall(ZoneList<Expression*>* args);

  // Support for arguments.length and arguments[?].
  void GenerateArgumentsLength(ZoneList<Expression*>* args);
  void GenerateArgumentsAccess(ZoneList<Expression*>* args);

  // Support for accessing the class and value fields of an object.
  void GenerateClassOf(ZoneList<Expression*>* args);
  void GenerateValueOf(ZoneList<Expression*>* args);
  void GenerateSetValueOf(ZoneList<Expression*>* args);

  // Fast support for charCodeAt(n).
  void GenerateFastCharCodeAt(ZoneList<Expression*>* args);

  // Fast support for object equality testing.
  void GenerateObjectEquals(ZoneList<Expression*>* args);

  void GenerateLog(ZoneList<Expression*>* args);

  void GenerateGetFramePointer(ZoneList<Expression*>* args);

  // Fast support for Math.random().
  void GenerateRandomPositiveSmi(ZoneList<Expression*>* args);

  // Fast support for StringAdd.
  void GenerateStringAdd(ZoneList<Expression*>* args);

  // Fast support for SubString.
  void GenerateSubString(ZoneList<Expression*>* args);

  // Fast support for StringCompare.
  void GenerateStringCompare(ZoneList<Expression*>* args);

  // Support for direct calls from JavaScript to native RegExp code.
  void GenerateRegExpExec(ZoneList<Expression*>* args);

  // Fast support for number to string.
  void GenerateNumberToString(ZoneList<Expression*>* args);

  // Fast call to math functions.
  void GenerateMathSin(ZoneList<Expression*>* args);
  void GenerateMathCos(ZoneList<Expression*>* args);

  // Simple condition analysis.
  enum ConditionAnalysis {
    ALWAYS_TRUE,
    ALWAYS_FALSE,
    DONT_KNOW
  };
  ConditionAnalysis AnalyzeCondition(Expression* cond);

  // Methods used to indicate which source code is generated for. Source
  // positions are collected by the assembler and emitted with the relocation
  // information.
  void CodeForFunctionPosition(FunctionLiteral* fun);
  void CodeForReturnPosition(FunctionLiteral* fun);
  void CodeForStatementPosition(Statement* node);
  void CodeForDoWhileConditionPosition(DoWhileStatement* stmt);
  void CodeForSourcePosition(int pos);

#ifdef DEBUG
  // True if the registers are valid for entry to a block. There should
  // be no frame-external references to (non-reserved) registers.
  bool HasValidEntryRegisters();
#endif

  ZoneList<DeferredCode*> deferred_;

  // Assembler
  MacroAssembler* masm_;  // to generate code

  CompilationInfo* info_;

  // Code generation state
  VirtualFrame* frame_;
  RegisterAllocator* allocator_;
  CodeGenState* state_;
  int loop_nesting_;

  // Jump targets.
  // The target of the return from the function.
  BreakTarget function_return_;

  // True if the function return is shadowed (ie, jumping to the target
  // function_return_ does not jump to the true function return, but rather
  // to some unlinking code).
  bool function_return_is_shadowed_;

  // True when we are in code that expects the virtual frame to be fully
  // spilled. Some virtual frame functions are disabled in DEBUG builds when
  // called from spilled code, because they do not leave the virtual frame
  // in a spilled state.
  bool in_spilled_code_;

  static InlineRuntimeLUT kInlineRuntimeLUT[];

  friend class VirtualFrame;
  friend class JumpTarget;
  friend class Reference;
  friend class Result;
  friend class FastCodeGenerator;
  friend class FullCodeGenerator;
  friend class FullCodeGenSyntaxChecker;

  friend class CodeGeneratorPatcher;  // Used in test-log-stack-tracer.cc

  DISALLOW_COPY_AND_ASSIGN(CodeGenerator);
};


// Flag that indicates how to generate code for the stub GenericBinaryOpStub.
enum GenericBinaryFlags {
  NO_GENERIC_BINARY_FLAGS = 0,
  NO_SMI_CODE_IN_STUB = 1 << 0  // Omit smi code in stub.
};
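
// Illustrative only: a hedged sketch of constructing the stub defined below
// with these flags (OVERWRITE_LEFT is one of the OverwriteMode values
// defined elsewhere in the code generator):
//
//   GenericBinaryOpStub stub(Token::ADD,
//                            OVERWRITE_LEFT,
//                            NO_SMI_CODE_IN_STUB);  // smi fast path is
//                                                   // inlined at call site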

class GenericBinaryOpStub: public CodeStub {
 public:
  GenericBinaryOpStub(Token::Value op,
                      OverwriteMode mode,
                      GenericBinaryFlags flags,
                      NumberInfo::Type operands_type = NumberInfo::kUnknown)
      : op_(op),
        mode_(mode),
        flags_(flags),
        args_in_registers_(false),
        args_reversed_(false),
        name_(NULL),
        operands_type_(operands_type) {
    use_sse3_ = CpuFeatures::IsSupported(SSE3);
    ASSERT(OpBits::is_valid(Token::NUM_TOKENS));
  }

  // Generate code to call the stub with the supplied arguments. This will
  // add code at the call site to prepare arguments either in registers or
  // on the stack together with the actual call.
  void GenerateCall(MacroAssembler* masm, Register left, Register right);
  void GenerateCall(MacroAssembler* masm, Register left, Smi* right);
  void GenerateCall(MacroAssembler* masm, Smi* left, Register right);

  Result GenerateCall(MacroAssembler* masm,
                      VirtualFrame* frame,
                      Result* left,
                      Result* right);

 private:
  Token::Value op_;
  OverwriteMode mode_;
  GenericBinaryFlags flags_;
  bool args_in_registers_;  // Arguments passed in registers, not on the stack.
  bool args_reversed_;  // Left and right argument are swapped.
  bool use_sse3_;
  char* name_;
  NumberInfo::Type operands_type_;

  const char* GetName();

#ifdef DEBUG
  void Print() {
    PrintF("GenericBinaryOpStub %d (op %s), "
           "(mode %d, flags %d, registers %d, reversed %d, only_numbers %s)\n",
           MinorKey(),
           Token::String(op_),
           static_cast<int>(mode_),
           static_cast<int>(flags_),
           static_cast<int>(args_in_registers_),
           static_cast<int>(args_reversed_),
           NumberInfo::ToString(operands_type_));
  }
#endif

  // Minor key encoding in 16 bits NNNFRASOOOOOOOMM.
  class ModeBits: public BitField<OverwriteMode, 0, 2> {};
  class OpBits: public BitField<Token::Value, 2, 7> {};
  class SSE3Bits: public BitField<bool, 9, 1> {};
  class ArgsInRegistersBits: public BitField<bool, 10, 1> {};
  class ArgsReversedBits: public BitField<bool, 11, 1> {};
  class FlagBits: public BitField<GenericBinaryFlags, 12, 1> {};
  class NumberInfoBits: public BitField<NumberInfo::Type, 13, 3> {};

  Major MajorKey() { return GenericBinaryOp; }
  int MinorKey() {
    // Encode the parameters in a unique 16 bit value.
    return OpBits::encode(op_)
           | ModeBits::encode(mode_)
           | FlagBits::encode(flags_)
           | SSE3Bits::encode(use_sse3_)
           | ArgsInRegistersBits::encode(args_in_registers_)
           | ArgsReversedBits::encode(args_reversed_)
           | NumberInfoBits::encode(operands_type_);
  }
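  // Illustrative only: in the "NNNFRASOOOOOOOMM" layout above, M is the
  // mode (bits 0-1), O the op (bits 2-8), S sse3 (bit 9), A args-in-
  // registers (bit 10), R args-reversed (bit 11), F the flags (bit 12),
  // and N the number info (bits 13-15). A hedged round-trip sketch,
  // assuming BitField's usual encode/decode pair:
  //
  //   int key = OpBits::encode(Token::ADD) | ModeBits::encode(mode_);
  //   Token::Value op = OpBits::decode(key);  // recovers Token::ADD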
  void Generate(MacroAssembler* masm);
  void GenerateSmiCode(MacroAssembler* masm, Label* slow);
  void GenerateLoadArguments(MacroAssembler* masm);
  void GenerateReturn(MacroAssembler* masm);

  bool ArgsInRegistersSupported() {
    return (op_ == Token::ADD) || (op_ == Token::SUB)
        || (op_ == Token::MUL) || (op_ == Token::DIV);
  }
  bool IsOperationCommutative() {
    return (op_ == Token::ADD) || (op_ == Token::MUL);
  }

  void SetArgsInRegisters() { args_in_registers_ = true; }
  void SetArgsReversed() { args_reversed_ = true; }
  bool HasSmiCodeInStub() { return (flags_ & NO_SMI_CODE_IN_STUB) == 0; }
  bool HasArgsInRegisters() { return args_in_registers_; }
  bool HasArgsReversed() { return args_reversed_; }
};


class StringStubBase: public CodeStub {
 public:
  // Generate code for copying characters using a simple loop. This should
  // only be used in places where the number of characters is small and the
  // additional setup and checking in GenerateCopyCharactersREP adds too
  // much overhead. Copying of overlapping regions is not supported.
  void GenerateCopyCharacters(MacroAssembler* masm,
                              Register dest,
                              Register src,
                              Register count,
                              bool ascii);

  // Generate code for copying characters using the rep movs instruction.
  // Copies rcx characters from rsi to rdi. Copying of overlapping regions
  // is not supported.
  void GenerateCopyCharactersREP(MacroAssembler* masm,
                                 Register dest,   // Must be rdi.
                                 Register src,    // Must be rsi.
                                 Register count,  // Must be rcx.
                                 bool ascii);
};


// Flag that indicates how to generate code for the stub StringAddStub.
enum StringAddFlags {
  NO_STRING_ADD_FLAGS = 0,
  NO_STRING_CHECK_IN_STUB = 1 << 0  // Omit string check in stub.
};


class StringAddStub: public StringStubBase {
 public:
  explicit StringAddStub(StringAddFlags flags) {
    string_check_ = ((flags & NO_STRING_CHECK_IN_STUB) == 0);
  }

 private:
  Major MajorKey() { return StringAdd; }
  int MinorKey() { return string_check_ ? 0 : 1; }

  void Generate(MacroAssembler* masm);

  // Should the stub check whether arguments are strings?
  bool string_check_;
};


class SubStringStub: public StringStubBase {
 public:
  SubStringStub() {}

 private:
  Major MajorKey() { return SubString; }
  int MinorKey() { return 0; }

  void Generate(MacroAssembler* masm);
};


class StringCompareStub: public CodeStub {
 public:
  StringCompareStub() {}

  // Compares two flat ASCII strings and returns the result in rax after
  // popping two arguments from the stack.
  static void GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
                                              Register left,
                                              Register right,
                                              Register scratch1,
                                              Register scratch2,
                                              Register scratch3,
                                              Register scratch4);

 private:
  Major MajorKey() { return StringCompare; }
  int MinorKey() { return 0; }

  void Generate(MacroAssembler* masm);
};


} }  // namespace v8::internal

#endif  // V8_X64_CODEGEN_X64_H_