1 // Copyright 2012 the V8 project authors. All rights reserved. 2 // Redistribution and use in source and binary forms, with or without 3 // modification, are permitted provided that the following conditions are 4 // met: 5 // 6 // * Redistributions of source code must retain the above copyright 7 // notice, this list of conditions and the following disclaimer. 8 // * Redistributions in binary form must reproduce the above 9 // copyright notice, this list of conditions and the following 10 // disclaimer in the documentation and/or other materials provided 11 // with the distribution. 12 // * Neither the name of Google Inc. nor the names of its 13 // contributors may be used to endorse or promote products derived 14 // from this software without specific prior written permission. 15 // 16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
// Lithium is the low-level, architecture-parameterized IR of the Crankshaft
// optimizing compiler. This file holds the platform-independent pieces:
// operand printing, cached operand setup/teardown, parallel moves, pointer
// maps, stack-slot offset computation, and the LChunk container that owns
// the final instruction sequence for one compiled function.

#include "v8.h"
#include "lithium.h"
#include "scopes.h"

// Pull in the lithium instruction definitions and code generator for the
// single target architecture this binary is built for.
#if V8_TARGET_ARCH_IA32
#include "ia32/lithium-ia32.h"
#include "ia32/lithium-codegen-ia32.h"
#elif V8_TARGET_ARCH_X64
#include "x64/lithium-x64.h"
#include "x64/lithium-codegen-x64.h"
#elif V8_TARGET_ARCH_ARM
#include "arm/lithium-arm.h"
#include "arm/lithium-codegen-arm.h"
#elif V8_TARGET_ARCH_MIPS
#include "mips/lithium-mips.h"
#include "mips/lithium-codegen-mips.h"
#else
#error "Unknown architecture."
#endif

namespace v8 {
namespace internal {


// Writes a human-readable description of this operand to |stream| for IR
// tracing, e.g. "v42(R)" for an unallocated operand that must get a
// register, or "[stack:3]" for an allocated stack slot.
void LOperand::PrintTo(StringStream* stream) {
  LUnallocated* unalloc = NULL;
  switch (kind()) {
    case INVALID:
      stream->Add("(0)");
      break;
    case UNALLOCATED:
      unalloc = LUnallocated::cast(this);
      stream->Add("v%d", unalloc->virtual_register());
      // A FIXED_SLOT basic policy carries no extended policy; print the
      // slot constraint and stop.
      if (unalloc->basic_policy() == LUnallocated::FIXED_SLOT) {
        stream->Add("(=%dS)", unalloc->fixed_slot_index());
        break;
      }
      switch (unalloc->extended_policy()) {
        case LUnallocated::NONE:
          break;
        case LUnallocated::FIXED_REGISTER: {
          int reg_index = unalloc->fixed_register_index();
          const char* register_name =
              Register::AllocationIndexToString(reg_index);
          stream->Add("(=%s)", register_name);
          break;
        }
        case LUnallocated::FIXED_DOUBLE_REGISTER: {
          int reg_index = unalloc->fixed_register_index();
          const char* double_register_name =
              DoubleRegister::AllocationIndexToString(reg_index);
          stream->Add("(=%s)", double_register_name);
          break;
        }
        case LUnallocated::MUST_HAVE_REGISTER:
          stream->Add("(R)");
          break;
        case LUnallocated::WRITABLE_REGISTER:
          stream->Add("(WR)");
          break;
        case LUnallocated::SAME_AS_FIRST_INPUT:
          stream->Add("(1)");
          break;
        case LUnallocated::ANY:
          stream->Add("(-)");
          break;
      }
      break;
    case CONSTANT_OPERAND:
      stream->Add("[constant:%d]", index());
      break;
    case STACK_SLOT:
      stream->Add("[stack:%d]", index());
      break;
    case DOUBLE_STACK_SLOT:
      stream->Add("[double_stack:%d]", index());
      break;
    case REGISTER:
      stream->Add("[%s|R]", Register::AllocationIndexToString(index()));
      break;
    case DOUBLE_REGISTER:
      stream->Add("[%s|R]", DoubleRegister::AllocationIndexToString(index()));
      break;
    case ARGUMENT:
      stream->Add("[arg:%d]", index());
      break;
  }
}

// For each cached operand type in LITHIUM_OPERAND_LIST, defines the static
// |cache| array plus SetUpCache()/TearDownCache(): a block of
// kNumCachedOperands operands is pre-converted once so operands with small
// indices can be handed out without allocating.
#define DEFINE_OPERAND_CACHE(name, type)              \
  L##name* L##name::cache = NULL;                     \
                                                      \
  void L##name::SetUpCache() {                        \
    if (cache) return;                                \
    cache = new L##name[kNumCachedOperands];          \
    for (int i = 0; i < kNumCachedOperands; i++) {    \
      cache[i].ConvertTo(type, i);                    \
    }                                                 \
  }                                                   \
                                                      \
  void L##name::TearDownCache() {                     \
    delete[] cache;                                   \
  }

LITHIUM_OPERAND_LIST(DEFINE_OPERAND_CACHE)
#undef DEFINE_OPERAND_CACHE

// Initializes the caches for every operand type in LITHIUM_OPERAND_LIST.
void LOperand::SetUpCaches() {
#define LITHIUM_OPERAND_SETUP(name, type) L##name::SetUpCache();
  LITHIUM_OPERAND_LIST(LITHIUM_OPERAND_SETUP)
#undef LITHIUM_OPERAND_SETUP
}


// Frees the caches for every operand type in LITHIUM_OPERAND_LIST.
void LOperand::TearDownCaches() {
#define LITHIUM_OPERAND_TEARDOWN(name, type) L##name::TearDownCache();
  LITHIUM_OPERAND_LIST(LITHIUM_OPERAND_TEARDOWN)
#undef LITHIUM_OPERAND_TEARDOWN
}


// A parallel move is redundant iff every one of its component moves is
// redundant, in which case the whole gap move can be skipped.
bool LParallelMove::IsRedundant() const {
  for (int i = 0; i < move_operands_.length(); ++i) {
    if (!move_operands_[i].IsRedundant()) return false;
  }
  return true;
}


// Prints the non-eliminated moves as "dest = source;" pairs separated by
// spaces; a move whose source equals its destination prints only the
// destination.
void LParallelMove::PrintDataTo(StringStream* stream) const {
  bool first = true;
  for (int i = 0; i < move_operands_.length(); ++i) {
    if (!move_operands_[i].IsEliminated()) {
      LOperand* source = move_operands_[i].source();
      LOperand* destination = move_operands_[i].destination();
      if (!first) stream->Add(" ");
      first = false;
      if (source->Equals(destination)) {
        destination->PrintTo(stream);
      } else {
        destination->PrintTo(stream);
        stream->Add(" = ");
        source->PrintTo(stream);
      }
      stream->Add(";");
    }
  }
}


// Prints the deoptimization environment: AST id, deopt index (if assigned),
// parameter/argument counts, and each captured value ("[hole]" for a NULL
// slot).
void LEnvironment::PrintTo(StringStream* stream) {
  stream->Add("[id=%d|", ast_id().ToInt());
  if (deoptimization_index() != Safepoint::kNoDeoptimizationIndex) {
    stream->Add("deopt_id=%d|", deoptimization_index());
  }
  stream->Add("parameters=%d|", parameter_count());
  stream->Add("arguments_stack_height=%d|", arguments_stack_height());
  for (int i = 0; i < values_.length(); ++i) {
    if (i != 0) stream->Add(";");
    if (values_[i] == NULL) {
      stream->Add("[hole]");
    } else {
      values_[i]->PrintTo(stream);
    }
  }
  stream->Add("]");
}


// Records |op| as holding a tagged pointer at this safepoint. Negative
// stack-slot indices denote incoming arguments (see GetParameterStackSlot)
// and are deliberately not recorded. Doubles can never hold tagged values.
void LPointerMap::RecordPointer(LOperand* op, Zone* zone) {
  // Do not record arguments as pointers.
  if (op->IsStackSlot() && op->index() < 0) return;
  ASSERT(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
  pointer_operands_.Add(op, zone);
}


// Removes every recorded pointer operand equal to |op|. The index is
// decremented after Remove() because the list compacts in place.
void LPointerMap::RemovePointer(LOperand* op) {
  // Do not record arguments as pointers.
  if (op->IsStackSlot() && op->index() < 0) return;
  ASSERT(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
  for (int i = 0; i < pointer_operands_.length(); ++i) {
    if (pointer_operands_[i]->Equals(op)) {
      pointer_operands_.Remove(i);
      --i;
    }
  }
}


// Records |op| as holding an untagged (raw) value at this safepoint, kept
// in a separate list from the tagged pointer operands.
void LPointerMap::RecordUntagged(LOperand* op, Zone* zone) {
  // Do not record arguments as pointers.
  if (op->IsStackSlot() && op->index() < 0) return;
  ASSERT(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
  untagged_operands_.Add(op, zone);
}


// Prints the tagged pointer operands as "{op;op;...}".
void LPointerMap::PrintTo(StringStream* stream) {
  stream->Add("{");
  for (int i = 0; i < pointer_operands_.length(); ++i) {
    if (i != 0) stream->Add(";");
    pointer_operands_[i]->PrintTo(stream);
  }
  stream->Add("}");
}


// Returns the frame-pointer-relative byte offset of the stack slot with the
// given index. Non-negative indices are locals/spill slots below the fixed
// part of the frame; negative indices are incoming parameters above the
// saved fp and return address.
int StackSlotOffset(int index) {
  if (index >= 0) {
    // Local or spill slot. Skip the frame pointer, function, and
    // context in the fixed part of the frame.
    return -(index + 1) * kPointerSize -
        StandardFrameConstants::kFixedFrameSizeFromFp;
  } else {
    // Incoming parameter. Skip the return address.
    return -(index + 1) * kPointerSize + kFPOnStackSize + kPCOnStackSize;
  }
}


// All chunk-owned lists are allocated in the graph's zone and freed with it.
LChunk::LChunk(CompilationInfo* info, HGraph* graph)
    : spill_slot_count_(0),
      info_(info),
      graph_(graph),
      instructions_(32, graph->zone()),
      pointer_maps_(8, graph->zone()),
      inlined_closures_(1, graph->zone()) {
}


// Returns the LLabel that starts the given basic block; the first lithium
// instruction of every block is its label.
LLabel* LChunk::GetLabel(int block_id) const {
  HBasicBlock* block = graph_->blocks()->at(block_id);
  int first_instruction = block->first_instruction_index();
  return LLabel::cast(instructions_[first_instruction]);
}


// Follows the label-replacement chain installed by MarkEmptyBlocks() and
// returns the id of the block that will actually be emitted.
int LChunk::LookupDestination(int block_id) const {
  LLabel* cur = GetLabel(block_id);
  while (cur->replacement() != NULL) {
    cur = cur->replacement();
  }
  return cur->block_id();
}

// Returns the assembler label of a block; the block must not have been
// replaced (callers are expected to resolve via LookupDestination first).
Label* LChunk::GetAssemblyLabel(int block_id) const {
  LLabel* label = GetLabel(block_id);
  ASSERT(!label->HasReplacement());
  return label->label();
}


// Marks the labels of empty blocks as replaced by their goto targets, so
// that code generation can skip them. A block is "empty" if its label is
// redundant, it is not a loop header, it ends in a goto, and everything
// between the label and the goto is a redundant gap.
void LChunk::MarkEmptyBlocks() {
  LPhase phase("L_Mark empty blocks", this);
  for (int i = 0; i < graph()->blocks()->length(); ++i) {
    HBasicBlock* block = graph()->blocks()->at(i);
    int first = block->first_instruction_index();
    int last = block->last_instruction_index();
    LInstruction* first_instr = instructions()->at(first);
    LInstruction* last_instr = instructions()->at(last);

    LLabel* label = LLabel::cast(first_instr);
    if (last_instr->IsGoto()) {
      LGoto* goto_instr = LGoto::cast(last_instr);
      if (label->IsRedundant() &&
          !label->is_loop_header()) {
        bool can_eliminate = true;
        // NOTE: this inner |i| intentionally shadows the outer block index.
        for (int i = first + 1; i < last && can_eliminate; ++i) {
          LInstruction* cur = instructions()->at(i);
          if (cur->IsGap()) {
            LGap* gap = LGap::cast(cur);
            if (!gap->IsRedundant()) {
              can_eliminate = false;
            }
          } else {
            can_eliminate = false;
          }
        }
        if (can_eliminate) {
          label->set_replacement(GetLabel(goto_instr->block_id()));
        }
      }
    }
  }
}


// Appends |instr| to the chunk together with a gap for parallel moves:
// the gap precedes a control instruction but follows a normal one. In both
// cases |index| ends up as the position of |instr| itself, which is what
// the instruction's pointer map (if any) records.
void LChunk::AddInstruction(LInstruction* instr, HBasicBlock* block) {
  LInstructionGap* gap = new(graph_->zone()) LInstructionGap(block);
  gap->set_hydrogen_value(instr->hydrogen_value());
  int index = -1;
  if (instr->IsControl()) {
    instructions_.Add(gap, zone());
    index = instructions_.length();
    instructions_.Add(instr, zone());
  } else {
    index = instructions_.length();
    instructions_.Add(instr, zone());
    instructions_.Add(gap, zone());
  }
  if (instr->HasPointerMap()) {
    pointer_maps_.Add(instr->pointer_map(), zone());
    instr->pointer_map()->set_lithium_position(index);
  }
}


// Creates a constant operand referring to |constant| by its value id.
LConstantOperand* LChunk::DefineConstantOperand(HConstant* constant) {
  return LConstantOperand::Create(constant->id(), zone());
}


int LChunk::GetParameterStackSlot(int index) const {
  // The receiver is at index 0, the first parameter at index 1, so we
  // shift all parameter indexes down by the number of parameters, and
  // make sure they end up negative so they are distinguishable from
  // spill slots.
  int result = index - info()->num_parameters() - 1;

  ASSERT(result < 0);
  return result;
}


// A parameter relative to ebp in the arguments stub.
int LChunk::ParameterAt(int index) {
  ASSERT(-1 <= index);  // -1 is the receiver.
  return (1 + info()->scope()->num_parameters() - index) *
      kPointerSize;
}


// Returns the gap at |index|; asserts (via the cast) that it is a gap.
LGap* LChunk::GetGapAt(int index) const {
  return LGap::cast(instructions_[index]);
}


bool LChunk::IsGapAt(int index) const {
  return instructions_[index]->IsGap();
}


// Scans backwards from |index| to the nearest gap position. Relies on the
// gap interleaving done by AddInstruction, so a gap is always found.
int LChunk::NearestGapPos(int index) const {
  while (!IsGapAt(index)) index--;
  return index;
}


// Adds a from->to move to the START parallel move of the gap at |index|.
void LChunk::AddGapMove(int index, LOperand* from, LOperand* to) {
  GetGapAt(index)->GetOrCreateParallelMove(
      LGap::START, zone())->AddMove(from, to, zone());
}


// Resolves a constant operand back to the HConstant it was created from.
HConstant* LChunk::LookupConstant(LConstantOperand* operand) const {
  return HConstant::cast(graph_->LookupValue(operand->index()));
}


Representation LChunk::LookupLiteralRepresentation(
    LConstantOperand* operand) const {
  return graph_->LookupValue(operand->index())->representation();
}


// Builds a lithium chunk from |graph| and runs the register allocator over
// it. Returns NULL (with a bailout reason set on the compilation info) if
// the graph has too many values or register allocation fails. Handle and
// heap allocation are forbidden for the duration of the build.
LChunk* LChunk::NewChunk(HGraph* graph) {
  DisallowHandleAllocation no_handles;
  DisallowHeapAllocation no_gc;
  graph->DisallowAddingNewValues();
  int values = graph->GetMaximumValueID();
  CompilationInfo* info = graph->info();
  if (values > LUnallocated::kMaxVirtualRegisters) {
    info->set_bailout_reason(kNotEnoughVirtualRegistersForValues);
    return NULL;
  }
  LAllocator allocator(values, graph);
  LChunkBuilder builder(info, graph, &allocator);
  LChunk* chunk = builder.Build();
  if (chunk == NULL) return NULL;

  if (!allocator.Allocate(chunk)) {
    info->set_bailout_reason(kNotEnoughVirtualRegistersRegalloc);
    return NULL;
  }

  chunk->set_allocated_double_registers(
      allocator.assigned_double_registers());

  return chunk;
}


// Generates machine code for this chunk. Empty blocks are elided first;
// on success the finished Code object is returned (marked as crankshafted,
// with line-position info logged), otherwise a null handle.
Handle<Code> LChunk::Codegen() {
  MacroAssembler assembler(info()->isolate(), NULL, 0);
  LOG_CODE_EVENT(info()->isolate(),
                 CodeStartLinePosInfoRecordEvent(
                     assembler.positions_recorder()));
  LCodeGen generator(this, &assembler, info());

  MarkEmptyBlocks();

  if (generator.GenerateCode()) {
    CodeGenerator::MakeCodePrologue(info(), "optimized");
    Code::Flags flags = info()->flags();
    Handle<Code> code =
        CodeGenerator::MakeCodeEpilogue(&assembler, flags, info());
    generator.FinishCode(code);
    code->set_is_crankshafted(true);
    void* jit_handler_data =
        assembler.positions_recorder()->DetachJITHandlerData();
    LOG_CODE_EVENT(info()->isolate(),
                   CodeEndLinePosInfoRecordEvent(*code, jit_handler_data));

    CodeGenerator::PrintCode(code, info());
    return code;
  }
  return Handle<Code>::null();
}


// Records the set of double registers assigned by the allocator and, when
// the function saves caller doubles, reserves spill slots for each one —
// two pointer-sized slots per double on targets where a double is twice
// the pointer size.
void LChunk::set_allocated_double_registers(BitVector* allocated_registers) {
  allocated_double_registers_ = allocated_registers;
  BitVector* doubles = allocated_double_registers();
  BitVector::Iterator iterator(doubles);
  while (!iterator.Done()) {
    if (info()->saves_caller_doubles()) {
      if (kDoubleSize == kPointerSize * 2) {
        spill_slot_count_ += 2;
      } else {
        spill_slot_count_++;
      }
    }
    iterator.Advance();
  }
}


// If the control instruction's successor is statically known, returns an
// unconditional LGoto to it so the branch can be elided; otherwise NULL.
LInstruction* LChunkBuilder::CheckElideControlInstruction(
    HControlInstruction* instr) {
  HBasicBlock* successor;
  if (!instr->KnownSuccessorBlock(&successor)) return NULL;
  return new(zone()) LGoto(successor);
}


// Emits the chunk's lithium trace output when tracing is enabled.
LPhase::~LPhase() {
  if (ShouldProduceTraceOutput()) {
    isolate()->GetHTracer()->TraceLithium(name(), chunk_);
  }
}


} }  // namespace v8::internal